]> git.proxmox.com Git - rustc.git/blame - src/librustc_borrowck/borrowck/move_data.rs
New upstream version 1.13.0+dfsg1
[rustc.git] / src / librustc_borrowck / borrowck / move_data.rs
CommitLineData
1a4d82fc
JJ
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Data structures used for tracking moves. Please see the extensive
//! comments in the section "Moves and initialization" in `README.md`.
1a4d82fc
JJ
13
14pub use self::MoveKind::*;
15
16use borrowck::*;
54a0048b 17use rustc::cfg;
1a4d82fc
JJ
18use rustc::middle::dataflow::DataFlowContext;
19use rustc::middle::dataflow::BitwiseOperator;
20use rustc::middle::dataflow::DataFlowOperator;
9346a6ac 21use rustc::middle::dataflow::KillFrom;
1a4d82fc 22use rustc::middle::expr_use_visitor as euv;
9cc50fc6 23use rustc::middle::expr_use_visitor::MutateMode;
9e0c209e
SL
24use rustc::middle::mem_categorization as mc;
25use rustc::ty::{self, TyCtxt};
1a4d82fc 26use rustc::util::nodemap::{FnvHashMap, NodeSet};
62682a34 27
1a4d82fc
JJ
28use std::cell::RefCell;
29use std::rc::Rc;
85aaf69f 30use std::usize;
1a4d82fc 31use syntax::ast;
3157f602 32use syntax_pos::Span;
54a0048b
SL
33use rustc::hir;
34use rustc::hir::intravisit::IdRange;
1a4d82fc
JJ
35
36#[path="fragments.rs"]
37pub mod fragments;
38
39pub struct MoveData<'tcx> {
c34b1796 40 /// Move paths. See section "Move paths" in `README.md`.
1a4d82fc
JJ
41 pub paths: RefCell<Vec<MovePath<'tcx>>>,
42
43 /// Cache of loan path to move path index, for easy lookup.
44 pub path_map: RefCell<FnvHashMap<Rc<LoanPath<'tcx>>, MovePathIndex>>,
45
46 /// Each move or uninitialized variable gets an entry here.
47 pub moves: RefCell<Vec<Move>>,
48
49 /// Assignments to a variable, like `x = foo`. These are assigned
50 /// bits for dataflow, since we must track them to ensure that
51 /// immutable variables are assigned at most once along each path.
52 pub var_assignments: RefCell<Vec<Assignment>>,
53
54 /// Assignments to a path, like `x.f = foo`. These are not
55 /// assigned dataflow bits, but we track them because they still
56 /// kill move bits.
57 pub path_assignments: RefCell<Vec<Assignment>>,
58
59 /// Enum variant matched within a pattern on some match arm, like
60 /// `SomeStruct{ f: Variant1(x, y) } => ...`
61 pub variant_matches: RefCell<Vec<VariantMatch>>,
62
63 /// Assignments to a variable or path, like `x = foo`, but not `x += foo`.
64 pub assignee_ids: RefCell<NodeSet>,
65
66 /// Path-fragments from moves in to or out of parts of structured data.
67 pub fragments: RefCell<fragments::FragmentSets>,
68}
69
70pub struct FlowedMoveData<'a, 'tcx: 'a> {
71 pub move_data: MoveData<'tcx>,
72
73 pub dfcx_moves: MoveDataFlow<'a, 'tcx>,
74
75 // We could (and maybe should, for efficiency) combine both move
76 // and assign data flow into one, but this way it's easier to
77 // distinguish the bits that correspond to moves and assignments.
78 pub dfcx_assign: AssignDataFlow<'a, 'tcx>
79}
80
/// Index into `MoveData.paths`, used like a pointer.
#[derive(Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct MovePathIndex(usize);

impl MovePathIndex {
    /// Returns the raw `usize` index.
    fn get(&self) -> usize {
        self.0
    }
}

impl Clone for MovePathIndex {
    fn clone(&self) -> MovePathIndex {
        // `MovePathIndex` is `Copy`, so cloning is just a bit copy.
        *self
    }
}

/// Sentinel standing in for "no move path" (e.g. the parent of a root).
#[allow(non_upper_case_globals)]
const InvalidMovePathIndex: MovePathIndex = MovePathIndex(usize::MAX);
99
/// Index into `MoveData.moves`, used like a pointer.
#[derive(Copy, Clone, PartialEq)]
pub struct MoveIndex(usize);

impl MoveIndex {
    /// Returns the raw `usize` index.
    fn get(&self) -> usize {
        self.0
    }
}

/// Sentinel standing in for "no move" (end of a move list).
#[allow(non_upper_case_globals)]
const InvalidMoveIndex: MoveIndex = MoveIndex(usize::MAX);
112
113pub struct MovePath<'tcx> {
114 /// Loan path corresponding to this move path
115 pub loan_path: Rc<LoanPath<'tcx>>,
116
117 /// Parent pointer, `InvalidMovePathIndex` if root
118 pub parent: MovePathIndex,
119
120 /// Head of linked list of moves to this path,
121 /// `InvalidMoveIndex` if not moved
122 pub first_move: MoveIndex,
123
124 /// First node in linked list of children, `InvalidMovePathIndex` if leaf
125 pub first_child: MovePathIndex,
126
127 /// Next node in linked list of parent's children (siblings),
128 /// `InvalidMovePathIndex` if none.
129 pub next_sibling: MovePathIndex,
130}
131
/// Why a value is considered moved; used when reporting errors.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MoveKind {
    Declared, // When declared, variables start out "moved".
    MoveExpr, // Expression or binding that moves a variable
    MovePat,  // By-move binding
    Captured  // Closure creation that moves a value
}
139
c34b1796 140#[derive(Copy, Clone)]
1a4d82fc
JJ
141pub struct Move {
142 /// Path being moved.
143 pub path: MovePathIndex,
144
145 /// id of node that is doing the move.
146 pub id: ast::NodeId,
147
148 /// Kind of move, for error messages.
149 pub kind: MoveKind,
150
151 /// Next node in linked list of moves from `path`, or `InvalidMoveIndex`
152 pub next_move: MoveIndex
153}
154
c34b1796 155#[derive(Copy, Clone)]
1a4d82fc
JJ
156pub struct Assignment {
157 /// Path being assigned.
158 pub path: MovePathIndex,
159
160 /// id where assignment occurs
161 pub id: ast::NodeId,
162
163 /// span of node where assignment occurs
164 pub span: Span,
c1a9b12d
SL
165
166 /// id for l-value expression on lhs of assignment
167 pub assignee_id: ast::NodeId,
1a4d82fc
JJ
168}
169
c34b1796 170#[derive(Copy, Clone)]
1a4d82fc
JJ
171pub struct VariantMatch {
172 /// downcast to the variant.
173 pub path: MovePathIndex,
174
175 /// path being downcast to the variant.
176 pub base_path: MovePathIndex,
177
178 /// id where variant's pattern occurs
179 pub id: ast::NodeId,
180
181 /// says if variant established by move (and why), by copy, or by borrow.
182 pub mode: euv::MatchMode
183}
184
185#[derive(Clone, Copy)]
186pub struct MoveDataFlowOperator;
187
188pub type MoveDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, MoveDataFlowOperator>;
189
190#[derive(Clone, Copy)]
191pub struct AssignDataFlowOperator;
192
193pub type AssignDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, AssignDataFlowOperator>;
194
195fn loan_path_is_precise(loan_path: &LoanPath) -> bool {
196 match loan_path.kind {
197 LpVar(_) | LpUpvar(_) => {
198 true
199 }
9e0c209e 200 LpExtend(.., LpInterior(_, InteriorKind::InteriorElement(..))) => {
85aaf69f 201 // Paths involving element accesses a[i] do not refer to a unique
1a4d82fc 202 // location, as there is no accurate tracking of the indices.
85aaf69f
SL
203 //
204 // (Paths involving element accesses via slice pattern bindings
205 // can in principle be tracked precisely, but that is future
206 // work. For now, continue claiming that they are imprecise.)
1a4d82fc
JJ
207 false
208 }
209 LpDowncast(ref lp_base, _) |
9e0c209e 210 LpExtend(ref lp_base, ..) => {
7453a54e 211 loan_path_is_precise(&lp_base)
1a4d82fc
JJ
212 }
213 }
214}
215
a7813a04 216impl<'a, 'tcx> MoveData<'tcx> {
1a4d82fc
JJ
217 pub fn new() -> MoveData<'tcx> {
218 MoveData {
219 paths: RefCell::new(Vec::new()),
85aaf69f 220 path_map: RefCell::new(FnvHashMap()),
1a4d82fc
JJ
221 moves: RefCell::new(Vec::new()),
222 path_assignments: RefCell::new(Vec::new()),
223 var_assignments: RefCell::new(Vec::new()),
224 variant_matches: RefCell::new(Vec::new()),
85aaf69f 225 assignee_ids: RefCell::new(NodeSet()),
1a4d82fc
JJ
226 fragments: RefCell::new(fragments::FragmentSets::new()),
227 }
228 }
229
230 pub fn path_loan_path(&self, index: MovePathIndex) -> Rc<LoanPath<'tcx>> {
231 (*self.paths.borrow())[index.get()].loan_path.clone()
232 }
233
234 fn path_parent(&self, index: MovePathIndex) -> MovePathIndex {
235 (*self.paths.borrow())[index.get()].parent
236 }
237
238 fn path_first_move(&self, index: MovePathIndex) -> MoveIndex {
239 (*self.paths.borrow())[index.get()].first_move
240 }
241
242 /// Returns the index of first child, or `InvalidMovePathIndex` if
243 /// `index` is leaf.
244 fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex {
245 (*self.paths.borrow())[index.get()].first_child
246 }
247
248 fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex {
249 (*self.paths.borrow())[index.get()].next_sibling
250 }
251
252 fn set_path_first_move(&self,
253 index: MovePathIndex,
254 first_move: MoveIndex) {
255 (*self.paths.borrow_mut())[index.get()].first_move = first_move
256 }
257
258 fn set_path_first_child(&self,
259 index: MovePathIndex,
260 first_child: MovePathIndex) {
261 (*self.paths.borrow_mut())[index.get()].first_child = first_child
262 }
263
264 fn move_next_move(&self, index: MoveIndex) -> MoveIndex {
265 //! Type safe indexing operator
266 (*self.moves.borrow())[index.get()].next_move
267 }
268
269 fn is_var_path(&self, index: MovePathIndex) -> bool {
270 //! True if `index` refers to a variable
271 self.path_parent(index) == InvalidMovePathIndex
272 }
273
274 /// Returns the existing move path index for `lp`, if any, and otherwise adds a new index for
275 /// `lp` and any of its base paths that do not yet have an index.
a7813a04 276 pub fn move_path(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
1a4d82fc 277 lp: Rc<LoanPath<'tcx>>) -> MovePathIndex {
3157f602
XL
278 if let Some(&index) = self.path_map.borrow().get(&lp) {
279 return index;
1a4d82fc
JJ
280 }
281
282 let index = match lp.kind {
283 LpVar(..) | LpUpvar(..) => {
284 let index = MovePathIndex(self.paths.borrow().len());
285
286 self.paths.borrow_mut().push(MovePath {
287 loan_path: lp.clone(),
288 parent: InvalidMovePathIndex,
289 first_move: InvalidMoveIndex,
290 first_child: InvalidMovePathIndex,
291 next_sibling: InvalidMovePathIndex,
292 });
293
294 index
295 }
296
297 LpDowncast(ref base, _) |
9e0c209e 298 LpExtend(ref base, ..) => {
1a4d82fc
JJ
299 let parent_index = self.move_path(tcx, base.clone());
300
301 let index = MovePathIndex(self.paths.borrow().len());
302
303 let next_sibling = self.path_first_child(parent_index);
304 self.set_path_first_child(parent_index, index);
305
306 self.paths.borrow_mut().push(MovePath {
307 loan_path: lp.clone(),
308 parent: parent_index,
309 first_move: InvalidMoveIndex,
310 first_child: InvalidMovePathIndex,
311 next_sibling: next_sibling,
312 });
313
314 index
315 }
316 };
317
62682a34
SL
318 debug!("move_path(lp={:?}, index={:?})",
319 lp,
1a4d82fc
JJ
320 index);
321
322 assert_eq!(index.get(), self.paths.borrow().len() - 1);
323 self.path_map.borrow_mut().insert(lp, index);
324 return index;
325 }
326
327 fn existing_move_path(&self, lp: &Rc<LoanPath<'tcx>>)
328 -> Option<MovePathIndex> {
329 self.path_map.borrow().get(lp).cloned()
330 }
331
332 fn existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>)
333 -> Vec<MovePathIndex> {
334 let mut result = vec!();
335 self.add_existing_base_paths(lp, &mut result);
336 result
337 }
338
339 /// Adds any existing move path indices for `lp` and any base paths of `lp` to `result`, but
340 /// does not add new move paths
341 fn add_existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>,
342 result: &mut Vec<MovePathIndex>) {
343 match self.path_map.borrow().get(lp).cloned() {
344 Some(index) => {
345 self.each_base_path(index, |p| {
346 result.push(p);
347 true
348 });
349 }
350 None => {
351 match lp.kind {
352 LpVar(..) | LpUpvar(..) => { }
353 LpDowncast(ref b, _) |
9e0c209e 354 LpExtend(ref b, ..) => {
1a4d82fc
JJ
355 self.add_existing_base_paths(b, result);
356 }
357 }
358 }
359 }
360
361 }
362
363 /// Adds a new move entry for a move of `lp` that occurs at location `id` with kind `kind`.
a7813a04 364 pub fn add_move(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
1a4d82fc
JJ
365 lp: Rc<LoanPath<'tcx>>,
366 id: ast::NodeId,
367 kind: MoveKind) {
9e0c209e
SL
368 // Moving one union field automatically moves all its fields.
369 if let LpExtend(ref base_lp, mutbl, LpInterior(opt_variant_id, interior)) = lp.kind {
370 if let ty::TyAdt(adt_def, _) = base_lp.ty.sty {
371 if adt_def.is_union() {
372 for field in &adt_def.struct_variant().fields {
373 let field = InteriorKind::InteriorField(mc::NamedField(field.name));
374 let field_ty = if field == interior {
375 lp.ty
376 } else {
377 tcx.types.err // Doesn't matter
378 };
379 let sibling_lp_kind = LpExtend(base_lp.clone(), mutbl,
380 LpInterior(opt_variant_id, field));
381 let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, field_ty));
382 self.add_move_helper(tcx, sibling_lp, id, kind);
383 }
384 return;
385 }
386 }
387 }
388
389 self.add_move_helper(tcx, lp.clone(), id, kind);
390 }
391
392 fn add_move_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
393 lp: Rc<LoanPath<'tcx>>,
394 id: ast::NodeId,
395 kind: MoveKind) {
62682a34
SL
396 debug!("add_move(lp={:?}, id={}, kind={:?})",
397 lp,
1a4d82fc
JJ
398 id,
399 kind);
400
401 let path_index = self.move_path(tcx, lp.clone());
402 let move_index = MoveIndex(self.moves.borrow().len());
403
404 self.fragments.borrow_mut().add_move(path_index);
405
406 let next_move = self.path_first_move(path_index);
407 self.set_path_first_move(path_index, move_index);
408
409 self.moves.borrow_mut().push(Move {
410 path: path_index,
411 id: id,
412 kind: kind,
413 next_move: next_move
414 });
415 }
416
417 /// Adds a new record for an assignment to `lp` that occurs at location `id` with the given
418 /// `span`.
a7813a04 419 pub fn add_assignment(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
1a4d82fc
JJ
420 lp: Rc<LoanPath<'tcx>>,
421 assign_id: ast::NodeId,
422 span: Span,
423 assignee_id: ast::NodeId,
424 mode: euv::MutateMode) {
9e0c209e
SL
425 // Assigning to one union field automatically assigns to all its fields.
426 if let LpExtend(ref base_lp, mutbl, LpInterior(opt_variant_id, interior)) = lp.kind {
427 if let ty::TyAdt(adt_def, _) = base_lp.ty.sty {
428 if adt_def.is_union() {
429 for field in &adt_def.struct_variant().fields {
430 let field = InteriorKind::InteriorField(mc::NamedField(field.name));
431 let field_ty = if field == interior {
432 lp.ty
433 } else {
434 tcx.types.err // Doesn't matter
435 };
436 let sibling_lp_kind = LpExtend(base_lp.clone(), mutbl,
437 LpInterior(opt_variant_id, field));
438 let sibling_lp = Rc::new(LoanPath::new(sibling_lp_kind, field_ty));
439 self.add_assignment_helper(tcx, sibling_lp, assign_id,
440 span, assignee_id, mode);
441 }
442 return;
443 }
444 }
445 }
446
447 self.add_assignment_helper(tcx, lp.clone(), assign_id, span, assignee_id, mode);
448 }
449
450 fn add_assignment_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
451 lp: Rc<LoanPath<'tcx>>,
452 assign_id: ast::NodeId,
453 span: Span,
454 assignee_id: ast::NodeId,
455 mode: euv::MutateMode) {
62682a34
SL
456 debug!("add_assignment(lp={:?}, assign_id={}, assignee_id={}",
457 lp, assign_id, assignee_id);
1a4d82fc
JJ
458
459 let path_index = self.move_path(tcx, lp.clone());
460
461 self.fragments.borrow_mut().add_assignment(path_index);
462
463 match mode {
9cc50fc6 464 MutateMode::Init | MutateMode::JustWrite => {
1a4d82fc
JJ
465 self.assignee_ids.borrow_mut().insert(assignee_id);
466 }
9cc50fc6 467 MutateMode::WriteAndRead => { }
1a4d82fc
JJ
468 }
469
470 let assignment = Assignment {
471 path: path_index,
472 id: assign_id,
473 span: span,
c1a9b12d 474 assignee_id: assignee_id,
1a4d82fc
JJ
475 };
476
477 if self.is_var_path(path_index) {
62682a34
SL
478 debug!("add_assignment[var](lp={:?}, assignment={}, path_index={:?})",
479 lp, self.var_assignments.borrow().len(), path_index);
1a4d82fc
JJ
480
481 self.var_assignments.borrow_mut().push(assignment);
482 } else {
62682a34
SL
483 debug!("add_assignment[path](lp={:?}, path_index={:?})",
484 lp, path_index);
1a4d82fc
JJ
485
486 self.path_assignments.borrow_mut().push(assignment);
487 }
488 }
489
490 /// Adds a new record for a match of `base_lp`, downcast to
491 /// variant `lp`, that occurs at location `pattern_id`. (One
492 /// should be able to recover the span info from the
493 /// `pattern_id` and the ast_map, I think.)
a7813a04 494 pub fn add_variant_match(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
1a4d82fc
JJ
495 lp: Rc<LoanPath<'tcx>>,
496 pattern_id: ast::NodeId,
497 base_lp: Rc<LoanPath<'tcx>>,
498 mode: euv::MatchMode) {
62682a34
SL
499 debug!("add_variant_match(lp={:?}, pattern_id={})",
500 lp, pattern_id);
1a4d82fc
JJ
501
502 let path_index = self.move_path(tcx, lp.clone());
503 let base_path_index = self.move_path(tcx, base_lp.clone());
504
505 self.fragments.borrow_mut().add_assignment(path_index);
506
507 let variant_match = VariantMatch {
508 path: path_index,
509 base_path: base_path_index,
510 id: pattern_id,
511 mode: mode,
512 };
513
514 self.variant_matches.borrow_mut().push(variant_match);
515 }
516
a7813a04 517 fn fixup_fragment_sets(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) {
1a4d82fc
JJ
518 fragments::fixup_fragment_sets(self, tcx)
519 }
520
521 /// Adds the gen/kills for the various moves and
522 /// assignments into the provided data flow contexts.
523 /// Moves are generated by moves and killed by assignments and
524 /// scoping. Assignments are generated by assignment to variables and
c34b1796 525 /// killed by scoping. See `README.md` for more details.
a7813a04 526 fn add_gen_kills(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
1a4d82fc
JJ
527 dfcx_moves: &mut MoveDataFlow,
528 dfcx_assign: &mut AssignDataFlow) {
529 for (i, the_move) in self.moves.borrow().iter().enumerate() {
530 dfcx_moves.add_gen(the_move.id, i);
531 }
532
533 for (i, assignment) in self.var_assignments.borrow().iter().enumerate() {
534 dfcx_assign.add_gen(assignment.id, i);
9346a6ac
AL
535 self.kill_moves(assignment.path, assignment.id,
536 KillFrom::Execution, dfcx_moves);
1a4d82fc
JJ
537 }
538
62682a34 539 for assignment in self.path_assignments.borrow().iter() {
9346a6ac
AL
540 self.kill_moves(assignment.path, assignment.id,
541 KillFrom::Execution, dfcx_moves);
1a4d82fc
JJ
542 }
543
544 // Kill all moves related to a variable `x` when
545 // it goes out of scope:
62682a34 546 for path in self.paths.borrow().iter() {
1a4d82fc
JJ
547 match path.loan_path.kind {
548 LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
549 let kill_scope = path.loan_path.kill_scope(tcx);
c34b1796 550 let path = *self.path_map.borrow().get(&path.loan_path).unwrap();
e9174d1e 551 self.kill_moves(path, kill_scope.node_id(&tcx.region_maps),
9346a6ac 552 KillFrom::ScopeEnd, dfcx_moves);
1a4d82fc
JJ
553 }
554 LpExtend(..) => {}
555 }
556 }
557
558 // Kill all assignments when the variable goes out of scope:
559 for (assignment_index, assignment) in
560 self.var_assignments.borrow().iter().enumerate() {
561 let lp = self.path_loan_path(assignment.path);
562 match lp.kind {
563 LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
564 let kill_scope = lp.kill_scope(tcx);
9346a6ac 565 dfcx_assign.add_kill(KillFrom::ScopeEnd,
e9174d1e 566 kill_scope.node_id(&tcx.region_maps),
9346a6ac 567 assignment_index);
1a4d82fc
JJ
568 }
569 LpExtend(..) => {
54a0048b 570 bug!("var assignment for non var path");
1a4d82fc
JJ
571 }
572 }
573 }
574 }
575
576 fn each_base_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
577 F: FnMut(MovePathIndex) -> bool,
578 {
579 let mut p = index;
580 while p != InvalidMovePathIndex {
581 if !f(p) {
582 return false;
583 }
584 p = self.path_parent(p);
585 }
586 return true;
587 }
588
589 // FIXME(#19596) This is a workaround, but there should be better way to do this
590 fn each_extending_path_<F>(&self, index: MovePathIndex, f: &mut F) -> bool where
591 F: FnMut(MovePathIndex) -> bool,
592 {
593 if !(*f)(index) {
594 return false;
595 }
596
597 let mut p = self.path_first_child(index);
598 while p != InvalidMovePathIndex {
599 if !self.each_extending_path_(p, f) {
600 return false;
601 }
602 p = self.path_next_sibling(p);
603 }
604
605 return true;
606 }
607
608 fn each_extending_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
609 F: FnMut(MovePathIndex) -> bool,
610 {
611 self.each_extending_path_(index, &mut f)
612 }
613
614 fn each_applicable_move<F>(&self, index0: MovePathIndex, mut f: F) -> bool where
615 F: FnMut(MoveIndex) -> bool,
616 {
617 let mut ret = true;
618 self.each_extending_path(index0, |index| {
619 let mut p = self.path_first_move(index);
620 while p != InvalidMoveIndex {
621 if !f(p) {
622 ret = false;
623 break;
624 }
625 p = self.move_next_move(p);
626 }
627 ret
628 });
629 ret
630 }
631
632 fn kill_moves(&self,
633 path: MovePathIndex,
634 kill_id: ast::NodeId,
9346a6ac 635 kill_kind: KillFrom,
1a4d82fc
JJ
636 dfcx_moves: &mut MoveDataFlow) {
637 // We can only perform kills for paths that refer to a unique location,
638 // since otherwise we may kill a move from one location with an
639 // assignment referring to another location.
640
641 let loan_path = self.path_loan_path(path);
7453a54e 642 if loan_path_is_precise(&loan_path) {
1a4d82fc 643 self.each_applicable_move(path, |move_index| {
9346a6ac
AL
644 debug!("kill_moves add_kill {:?} kill_id={} move_index={}",
645 kill_kind, kill_id, move_index.get());
646 dfcx_moves.add_kill(kill_kind, kill_id, move_index.get());
1a4d82fc
JJ
647 true
648 });
649 }
650 }
651}
652
653impl<'a, 'tcx> FlowedMoveData<'a, 'tcx> {
654 pub fn new(move_data: MoveData<'tcx>,
a7813a04 655 tcx: TyCtxt<'a, 'tcx, 'tcx>,
1a4d82fc 656 cfg: &cfg::CFG,
54a0048b 657 id_range: IdRange,
e9174d1e
SL
658 decl: &hir::FnDecl,
659 body: &hir::Block)
1a4d82fc
JJ
660 -> FlowedMoveData<'a, 'tcx> {
661 let mut dfcx_moves =
662 DataFlowContext::new(tcx,
663 "flowed_move_data_moves",
664 Some(decl),
665 cfg,
666 MoveDataFlowOperator,
667 id_range,
668 move_data.moves.borrow().len());
669 let mut dfcx_assign =
670 DataFlowContext::new(tcx,
671 "flowed_move_data_assigns",
672 Some(decl),
673 cfg,
674 AssignDataFlowOperator,
675 id_range,
676 move_data.var_assignments.borrow().len());
677
678 move_data.fixup_fragment_sets(tcx);
679
680 move_data.add_gen_kills(tcx,
681 &mut dfcx_moves,
682 &mut dfcx_assign);
683
684 dfcx_moves.add_kills_from_flow_exits(cfg);
685 dfcx_assign.add_kills_from_flow_exits(cfg);
686
687 dfcx_moves.propagate(cfg, body);
688 dfcx_assign.propagate(cfg, body);
689
690 FlowedMoveData {
691 move_data: move_data,
692 dfcx_moves: dfcx_moves,
693 dfcx_assign: dfcx_assign,
694 }
695 }
696
697 pub fn kind_of_move_of_path(&self,
698 id: ast::NodeId,
699 loan_path: &Rc<LoanPath<'tcx>>)
700 -> Option<MoveKind> {
701 //! Returns the kind of a move of `loan_path` by `id`, if one exists.
702
703 let mut ret = None;
85aaf69f 704 if let Some(loan_path_index) = self.move_data.path_map.borrow().get(&*loan_path) {
1a4d82fc
JJ
705 self.dfcx_moves.each_gen_bit(id, |move_index| {
706 let the_move = self.move_data.moves.borrow();
707 let the_move = (*the_move)[move_index];
85aaf69f 708 if the_move.path == *loan_path_index {
1a4d82fc
JJ
709 ret = Some(the_move.kind);
710 false
711 } else {
712 true
713 }
714 });
715 }
716 ret
717 }
718
719 /// Iterates through each move of `loan_path` (or some base path of `loan_path`) that *may*
720 /// have occurred on entry to `id` without an intervening assignment. In other words, any moves
721 /// that would invalidate a reference to `loan_path` at location `id`.
722 pub fn each_move_of<F>(&self,
723 id: ast::NodeId,
724 loan_path: &Rc<LoanPath<'tcx>>,
725 mut f: F)
726 -> bool where
727 F: FnMut(&Move, &LoanPath<'tcx>) -> bool,
728 {
729 // Bad scenarios:
730 //
731 // 1. Move of `a.b.c`, use of `a.b.c`
732 // 2. Move of `a.b.c`, use of `a.b.c.d`
733 // 3. Move of `a.b.c`, use of `a` or `a.b`
734 //
735 // OK scenario:
736 //
737 // 4. move of `a.b.c`, use of `a.b.d`
738
739 let base_indices = self.move_data.existing_base_paths(loan_path);
740 if base_indices.is_empty() {
741 return true;
742 }
743
744 let opt_loan_path_index = self.move_data.existing_move_path(loan_path);
745
746 let mut ret = true;
747
748 self.dfcx_moves.each_bit_on_entry(id, |index| {
749 let the_move = self.move_data.moves.borrow();
750 let the_move = &(*the_move)[index];
751 let moved_path = the_move.path;
752 if base_indices.iter().any(|x| x == &moved_path) {
753 // Scenario 1 or 2: `loan_path` or some base path of
754 // `loan_path` was moved.
7453a54e 755 if !f(the_move, &self.move_data.path_loan_path(moved_path)) {
1a4d82fc
JJ
756 ret = false;
757 }
758 } else {
85aaf69f 759 if let Some(loan_path_index) = opt_loan_path_index {
1a4d82fc
JJ
760 let cont = self.move_data.each_base_path(moved_path, |p| {
761 if p == loan_path_index {
762 // Scenario 3: some extension of `loan_path`
763 // was moved
764 f(the_move,
7453a54e 765 &self.move_data.path_loan_path(moved_path))
1a4d82fc
JJ
766 } else {
767 true
768 }
769 });
85aaf69f 770 if !cont { ret = false; }
1a4d82fc
JJ
771 }
772 }
773 ret
774 })
775 }
776
777 /// Iterates through every assignment to `loan_path` that may have occurred on entry to `id`.
778 /// `loan_path` must be a single variable.
779 pub fn each_assignment_of<F>(&self,
780 id: ast::NodeId,
781 loan_path: &Rc<LoanPath<'tcx>>,
782 mut f: F)
783 -> bool where
784 F: FnMut(&Assignment) -> bool,
785 {
786 let loan_path_index = {
787 match self.move_data.existing_move_path(loan_path) {
788 Some(i) => i,
789 None => {
790 // if there were any assignments, it'd have an index
791 return true;
792 }
793 }
794 };
795
796 self.dfcx_assign.each_bit_on_entry(id, |index| {
797 let assignment = self.move_data.var_assignments.borrow();
798 let assignment = &(*assignment)[index];
799 if assignment.path == loan_path_index && !f(assignment) {
800 false
801 } else {
802 true
803 }
804 })
805 }
806}
807
808impl BitwiseOperator for MoveDataFlowOperator {
809 #[inline]
c34b1796 810 fn join(&self, succ: usize, pred: usize) -> usize {
1a4d82fc
JJ
811 succ | pred // moves from both preds are in scope
812 }
813}
814
815impl DataFlowOperator for MoveDataFlowOperator {
816 #[inline]
817 fn initial_value(&self) -> bool {
818 false // no loans in scope by default
819 }
820}
821
822impl BitwiseOperator for AssignDataFlowOperator {
823 #[inline]
c34b1796 824 fn join(&self, succ: usize, pred: usize) -> usize {
1a4d82fc
JJ
825 succ | pred // moves from both preds are in scope
826 }
827}
828
829impl DataFlowOperator for AssignDataFlowOperator {
830 #[inline]
831 fn initial_value(&self) -> bool {
832 false // no assignments in scope by default
833 }
834}