// (git web-viewer scrape artifact, converted to a comment so the file parses:
//  mirror git.proxmox.com, rustc.git, blob b38915612c5b0c453d83b30148a024e473acd553,
//  path src/librustc_borrowck/borrowck/move_data.rs)
1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! Data structures used for tracking moves. Please see the extensive
12 //! comments in the section "Moves and initialization" in `README.md`.
13
14 pub use self::MoveKind::*;
15
16 use borrowck::*;
17 use rustc::middle::cfg;
18 use rustc::middle::dataflow::DataFlowContext;
19 use rustc::middle::dataflow::BitwiseOperator;
20 use rustc::middle::dataflow::DataFlowOperator;
21 use rustc::middle::dataflow::KillFrom;
22 use rustc::middle::expr_use_visitor as euv;
23 use rustc::middle::ty;
24 use rustc::util::nodemap::{FnvHashMap, NodeSet};
25
26 use std::cell::RefCell;
27 use std::rc::Rc;
28 use std::usize;
29 use syntax::ast;
30 use syntax::ast_util;
31 use syntax::codemap::Span;
32
33 #[path="fragments.rs"]
34 pub mod fragments;
35
/// Central store of the moves, assignments, and variant matches seen in a
/// function body, organized around a tree of move paths. Filled in via
/// `add_move` / `add_assignment` / `add_variant_match`; later converted into
/// gen/kill dataflow bits by `FlowedMoveData::new`.
pub struct MoveData<'tcx> {
    /// Move paths. See section "Move paths" in `README.md`.
    pub paths: RefCell<Vec<MovePath<'tcx>>>,

    /// Cache of loan path to move path index, for easy lookup.
    pub path_map: RefCell<FnvHashMap<Rc<LoanPath<'tcx>>, MovePathIndex>>,

    /// Each move or uninitialized variable gets an entry here.
    pub moves: RefCell<Vec<Move>>,

    /// Assignments to a variable, like `x = foo`. These are assigned
    /// bits for dataflow, since we must track them to ensure that
    /// immutable variables are assigned at most once along each path.
    pub var_assignments: RefCell<Vec<Assignment>>,

    /// Assignments to a path, like `x.f = foo`. These are not
    /// assigned dataflow bits, but we track them because they still
    /// kill move bits.
    pub path_assignments: RefCell<Vec<Assignment>>,

    /// Enum variant matched within a pattern on some match arm, like
    /// `SomeStruct{ f: Variant1(x, y) } => ...`
    pub variant_matches: RefCell<Vec<VariantMatch>>,

    /// Assignments to a variable or path, like `x = foo`, but not `x += foo`.
    pub assignee_ids: RefCell<NodeSet>,

    /// Path-fragments from moves in to or out of parts of structured data.
    pub fragments: RefCell<fragments::FragmentSets>,
}
66
/// A `MoveData` paired with the two dataflow results computed from it
/// (see `FlowedMoveData::new`): one flow for move bits and one for
/// variable-assignment bits.
pub struct FlowedMoveData<'a, 'tcx: 'a> {
    /// The underlying tables of moves/assignments/paths.
    pub move_data: MoveData<'tcx>,

    /// Dataflow whose bits index into `move_data.moves`.
    pub dfcx_moves: MoveDataFlow<'a, 'tcx>,

    // We could (and maybe should, for efficiency) combine both move
    // and assign data flow into one, but this way it's easier to
    // distinguish the bits that correspond to moves and assignments.
    /// Dataflow whose bits index into `move_data.var_assignments`.
    pub dfcx_assign: AssignDataFlow<'a, 'tcx>
}
77
/// Index into `MoveData.paths`, used like a pointer.
/// `InvalidMovePathIndex` (below) serves as the null value.
// NOTE: `Clone` is implemented manually further down rather than derived.
#[derive(Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct MovePathIndex(usize);
81
82 impl MovePathIndex {
83 fn get(&self) -> usize {
84 let MovePathIndex(v) = *self; v
85 }
86 }
87
88 impl Clone for MovePathIndex {
89 fn clone(&self) -> MovePathIndex {
90 MovePathIndex(self.get())
91 }
92 }
93
/// Sentinel standing for "no move path" (e.g. the `parent` of a root
/// path, or the `first_child` of a leaf); uses `usize::MAX` as the value.
#[allow(non_upper_case_globals)]
const InvalidMovePathIndex: MovePathIndex = MovePathIndex(usize::MAX);
96
/// Index into `MoveData.moves`, used like a pointer.
/// `InvalidMoveIndex` (below) serves as the null value.
#[derive(Copy, Clone, PartialEq)]
pub struct MoveIndex(usize);
100
101 impl MoveIndex {
102 fn get(&self) -> usize {
103 let MoveIndex(v) = *self; v
104 }
105 }
106
/// Sentinel standing for "no move" (terminates the per-path linked
/// list of moves); uses `usize::MAX` as the value.
#[allow(non_upper_case_globals)]
const InvalidMoveIndex: MoveIndex = MoveIndex(usize::MAX);
109
/// One node in the tree of move paths. Children and the moves affecting
/// a path are threaded through the arena as intrusive singly-linked
/// lists of indices (`first_child`/`next_sibling` and
/// `first_move`/`Move::next_move` respectively).
pub struct MovePath<'tcx> {
    /// Loan path corresponding to this move path
    pub loan_path: Rc<LoanPath<'tcx>>,

    /// Parent pointer, `InvalidMovePathIndex` if root
    pub parent: MovePathIndex,

    /// Head of linked list of moves to this path,
    /// `InvalidMoveIndex` if not moved
    pub first_move: MoveIndex,

    /// First node in linked list of children, `InvalidMovePathIndex` if leaf
    pub first_child: MovePathIndex,

    /// Next node in linked list of parent's children (siblings),
    /// `InvalidMovePathIndex` if none.
    pub next_sibling: MovePathIndex,
}
128
/// How a value came to be considered moved; recorded per `Move` and used
/// for error messages (see `Move::kind`).
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MoveKind {
    /// When declared, variables start out "moved".
    Declared,
    /// Expression or binding that moves a variable
    MoveExpr,
    /// By-move binding
    MovePat,
    /// Closure creation that moves a value
    Captured
}
136
/// A single move, stored in `MoveData.moves` and linked into the list of
/// moves affecting its path via `next_move`.
#[derive(Copy, Clone)]
pub struct Move {
    /// Path being moved.
    pub path: MovePathIndex,

    /// id of node that is doing the move.
    pub id: ast::NodeId,

    /// Kind of move, for error messages.
    pub kind: MoveKind,

    /// Next node in linked list of moves from `path`, or `InvalidMoveIndex`
    pub next_move: MoveIndex
}
151
/// A single assignment, stored in either `MoveData.var_assignments`
/// (whole-variable destinations) or `MoveData.path_assignments`
/// (component destinations like `x.f`) — see `MoveData::add_assignment`.
#[derive(Copy, Clone)]
pub struct Assignment {
    /// Path being assigned.
    pub path: MovePathIndex,

    /// id where assignment occurs
    pub id: ast::NodeId,

    /// span of node where assignment occurs
    pub span: Span,
}
163
/// Record of an enum variant being matched in a pattern, stored in
/// `MoveData.variant_matches` (see `MoveData::add_variant_match`).
#[derive(Copy, Clone)]
pub struct VariantMatch {
    /// downcast to the variant.
    pub path: MovePathIndex,

    /// path being downcast to the variant.
    pub base_path: MovePathIndex,

    /// id where variant's pattern occurs
    pub id: ast::NodeId,

    /// says if variant established by move (and why), by copy, or by borrow.
    pub mode: euv::MatchMode
}
178
/// Dataflow join operator for move bits (impls at the bottom of this file).
#[derive(Clone, Copy)]
pub struct MoveDataFlowOperator;

/// Dataflow context whose bits index into `MoveData.moves`.
pub type MoveDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, MoveDataFlowOperator>;

/// Dataflow join operator for variable-assignment bits.
#[derive(Clone, Copy)]
pub struct AssignDataFlowOperator;

/// Dataflow context whose bits index into `MoveData.var_assignments`.
pub type AssignDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, AssignDataFlowOperator>;
188
189 fn loan_path_is_precise(loan_path: &LoanPath) -> bool {
190 match loan_path.kind {
191 LpVar(_) | LpUpvar(_) => {
192 true
193 }
194 LpExtend(_, _, LpInterior(InteriorKind::InteriorElement(..))) => {
195 // Paths involving element accesses a[i] do not refer to a unique
196 // location, as there is no accurate tracking of the indices.
197 //
198 // (Paths involving element accesses via slice pattern bindings
199 // can in principle be tracked precisely, but that is future
200 // work. For now, continue claiming that they are imprecise.)
201 false
202 }
203 LpDowncast(ref lp_base, _) |
204 LpExtend(ref lp_base, _, _) => {
205 loan_path_is_precise(&**lp_base)
206 }
207 }
208 }
209
impl<'tcx> MoveData<'tcx> {
    /// Creates a `MoveData` with all tables empty.
    pub fn new() -> MoveData<'tcx> {
        MoveData {
            paths: RefCell::new(Vec::new()),
            path_map: RefCell::new(FnvHashMap()),
            moves: RefCell::new(Vec::new()),
            path_assignments: RefCell::new(Vec::new()),
            var_assignments: RefCell::new(Vec::new()),
            variant_matches: RefCell::new(Vec::new()),
            assignee_ids: RefCell::new(NodeSet()),
            fragments: RefCell::new(fragments::FragmentSets::new()),
        }
    }

    /// Returns (a clone of the `Rc` for) the loan path stored at `index`.
    pub fn path_loan_path(&self, index: MovePathIndex) -> Rc<LoanPath<'tcx>> {
        (*self.paths.borrow())[index.get()].loan_path.clone()
    }

    /// Returns the parent of `index`, or `InvalidMovePathIndex` for a root.
    fn path_parent(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].parent
    }

    /// Head of the linked list of moves affecting `index`, or
    /// `InvalidMoveIndex` if the path has never been moved.
    fn path_first_move(&self, index: MovePathIndex) -> MoveIndex {
        (*self.paths.borrow())[index.get()].first_move
    }

    /// Returns the index of first child, or `InvalidMovePathIndex` if
    /// `index` is leaf.
    fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].first_child
    }

    /// Next sibling in the parent's child list, or `InvalidMovePathIndex`.
    fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].next_sibling
    }

    /// Overwrites the head of `index`'s move list (used by `add_move` to
    /// push a new move onto the front).
    fn set_path_first_move(&self,
                           index: MovePathIndex,
                           first_move: MoveIndex) {
        (*self.paths.borrow_mut())[index.get()].first_move = first_move
    }

    /// Overwrites the head of `index`'s child list (used by `move_path`
    /// when threading a new child in front of its siblings).
    fn set_path_first_child(&self,
                            index: MovePathIndex,
                            first_child: MovePathIndex) {
        (*self.paths.borrow_mut())[index.get()].first_child = first_child
    }

    fn move_next_move(&self, index: MoveIndex) -> MoveIndex {
        //! Type safe indexing operator
        (*self.moves.borrow())[index.get()].next_move
    }

    fn is_var_path(&self, index: MovePathIndex) -> bool {
        //! True if `index` refers to a variable
        // Only root paths (vars/upvars) have no parent.
        self.path_parent(index) == InvalidMovePathIndex
    }

    /// Returns the existing move path index for `lp`, if any, and otherwise adds a new index for
    /// `lp` and any of its base paths that do not yet have an index.
    pub fn move_path(&self,
                     tcx: &ty::ctxt<'tcx>,
                     lp: Rc<LoanPath<'tcx>>) -> MovePathIndex {
        // Fast path: `lp` was interned before.
        match self.path_map.borrow().get(&lp) {
            Some(&index) => {
                return index;
            }
            None => {}
        }

        let index = match lp.kind {
            // Root paths have no parent and no siblings.
            LpVar(..) | LpUpvar(..) => {
                let index = MovePathIndex(self.paths.borrow().len());

                self.paths.borrow_mut().push(MovePath {
                    loan_path: lp.clone(),
                    parent: InvalidMovePathIndex,
                    first_move: InvalidMoveIndex,
                    first_child: InvalidMovePathIndex,
                    next_sibling: InvalidMovePathIndex,
                });

                index
            }

            // Extensions/downcasts: intern the base first (recursively),
            // then splice the new node onto the front of the parent's
            // child list.
            LpDowncast(ref base, _) |
            LpExtend(ref base, _, _) => {
                let parent_index = self.move_path(tcx, base.clone());

                let index = MovePathIndex(self.paths.borrow().len());

                let next_sibling = self.path_first_child(parent_index);
                self.set_path_first_child(parent_index, index);

                self.paths.borrow_mut().push(MovePath {
                    loan_path: lp.clone(),
                    parent: parent_index,
                    first_move: InvalidMoveIndex,
                    first_child: InvalidMovePathIndex,
                    next_sibling: next_sibling,
                });

                index
            }
        };

        debug!("move_path(lp={:?}, index={:?})",
               lp,
               index);

        // The new entry must be the last one pushed (the recursive call
        // above only pushes entries *before* `index` was computed... the
        // assert checks that invariant held).
        assert_eq!(index.get(), self.paths.borrow().len() - 1);
        self.path_map.borrow_mut().insert(lp, index);
        return index;
    }

    /// Looks up `lp` without creating a new move path.
    fn existing_move_path(&self, lp: &Rc<LoanPath<'tcx>>)
                          -> Option<MovePathIndex> {
        self.path_map.borrow().get(lp).cloned()
    }

    /// Collects the indices of `lp` and all of its existing base paths
    /// (see `add_existing_base_paths`); may be empty.
    fn existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>)
                           -> Vec<MovePathIndex> {
        let mut result = vec!();
        self.add_existing_base_paths(lp, &mut result);
        result
    }

    /// Adds any existing move path indices for `lp` and any base paths of `lp` to `result`, but
    /// does not add new move paths
    fn add_existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>,
                               result: &mut Vec<MovePathIndex>) {
        match self.path_map.borrow().get(lp).cloned() {
            Some(index) => {
                // `lp` itself is interned: its parent chain is all
                // interned too, so walk it directly.
                self.each_base_path(index, |p| {
                    result.push(p);
                    true
                });
            }
            None => {
                // `lp` is not interned; strip one component and retry on
                // the base (roots with no entry contribute nothing).
                match lp.kind {
                    LpVar(..) | LpUpvar(..) => { }
                    LpDowncast(ref b, _) |
                    LpExtend(ref b, _, _) => {
                        self.add_existing_base_paths(b, result);
                    }
                }
            }
        }

    }

    /// Adds a new move entry for a move of `lp` that occurs at location `id` with kind `kind`.
    pub fn add_move(&self,
                    tcx: &ty::ctxt<'tcx>,
                    lp: Rc<LoanPath<'tcx>>,
                    id: ast::NodeId,
                    kind: MoveKind) {
        debug!("add_move(lp={:?}, id={}, kind={:?})",
               lp,
               id,
               kind);

        let path_index = self.move_path(tcx, lp.clone());
        let move_index = MoveIndex(self.moves.borrow().len());

        self.fragments.borrow_mut().add_move(path_index);

        // Push the new move onto the front of the path's move list.
        let next_move = self.path_first_move(path_index);
        self.set_path_first_move(path_index, move_index);

        self.moves.borrow_mut().push(Move {
            path: path_index,
            id: id,
            kind: kind,
            next_move: next_move
        });
    }

    /// Adds a new record for an assignment to `lp` that occurs at location `id` with the given
    /// `span`.
    pub fn add_assignment(&self,
                          tcx: &ty::ctxt<'tcx>,
                          lp: Rc<LoanPath<'tcx>>,
                          assign_id: ast::NodeId,
                          span: Span,
                          assignee_id: ast::NodeId,
                          mode: euv::MutateMode) {
        debug!("add_assignment(lp={:?}, assign_id={}, assignee_id={}",
               lp, assign_id, assignee_id);

        let path_index = self.move_path(tcx, lp.clone());

        self.fragments.borrow_mut().add_assignment(path_index);

        // Pure writes (`x = foo`) are tracked in `assignee_ids`;
        // read-modify-writes (`x += foo`) are not.
        match mode {
            euv::Init | euv::JustWrite => {
                self.assignee_ids.borrow_mut().insert(assignee_id);
            }
            euv::WriteAndRead => { }
        }

        let assignment = Assignment {
            path: path_index,
            id: assign_id,
            span: span,
        };

        // Whole-variable assignments get dataflow bits; assignments to
        // components only kill move bits (see the struct docs).
        if self.is_var_path(path_index) {
            debug!("add_assignment[var](lp={:?}, assignment={}, path_index={:?})",
                   lp, self.var_assignments.borrow().len(), path_index);

            self.var_assignments.borrow_mut().push(assignment);
        } else {
            debug!("add_assignment[path](lp={:?}, path_index={:?})",
                   lp, path_index);

            self.path_assignments.borrow_mut().push(assignment);
        }
    }

    /// Adds a new record for a match of `base_lp`, downcast to
    /// variant `lp`, that occurs at location `pattern_id`. (One
    /// should be able to recover the span info from the
    /// `pattern_id` and the ast_map, I think.)
    pub fn add_variant_match(&self,
                             tcx: &ty::ctxt<'tcx>,
                             lp: Rc<LoanPath<'tcx>>,
                             pattern_id: ast::NodeId,
                             base_lp: Rc<LoanPath<'tcx>>,
                             mode: euv::MatchMode) {
        debug!("add_variant_match(lp={:?}, pattern_id={})",
               lp, pattern_id);

        let path_index = self.move_path(tcx, lp.clone());
        let base_path_index = self.move_path(tcx, base_lp.clone());

        self.fragments.borrow_mut().add_assignment(path_index);

        let variant_match = VariantMatch {
            path: path_index,
            base_path: base_path_index,
            id: pattern_id,
            mode: mode,
        };

        self.variant_matches.borrow_mut().push(variant_match);
    }

    /// Delegates to `fragments::fixup_fragment_sets`; normalizes the
    /// recorded fragment sets once all moves/assignments are in.
    fn fixup_fragment_sets(&self, tcx: &ty::ctxt<'tcx>) {
        fragments::fixup_fragment_sets(self, tcx)
    }

    /// Adds the gen/kills for the various moves and
    /// assignments into the provided data flow contexts.
    /// Moves are generated by moves and killed by assignments and
    /// scoping. Assignments are generated by assignment to variables and
    /// killed by scoping. See `README.md` for more details.
    fn add_gen_kills(&self,
                     tcx: &ty::ctxt<'tcx>,
                     dfcx_moves: &mut MoveDataFlow,
                     dfcx_assign: &mut AssignDataFlow) {
        // Each move generates its own bit at the node performing it.
        for (i, the_move) in self.moves.borrow().iter().enumerate() {
            dfcx_moves.add_gen(the_move.id, i);
        }

        // Variable assignments both generate an assign bit and kill any
        // prior moves of the assigned path.
        for (i, assignment) in self.var_assignments.borrow().iter().enumerate() {
            dfcx_assign.add_gen(assignment.id, i);
            self.kill_moves(assignment.path, assignment.id,
                            KillFrom::Execution, dfcx_moves);
        }

        // Component assignments have no bit of their own but still kill
        // moves of the assigned path.
        for assignment in self.path_assignments.borrow().iter() {
            self.kill_moves(assignment.path, assignment.id,
                            KillFrom::Execution, dfcx_moves);
        }

        // Kill all moves related to a variable `x` when
        // it goes out of scope:
        for path in self.paths.borrow().iter() {
            match path.loan_path.kind {
                LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
                    let kill_scope = path.loan_path.kill_scope(tcx);
                    let path = *self.path_map.borrow().get(&path.loan_path).unwrap();
                    self.kill_moves(path, kill_scope.node_id(),
                                    KillFrom::ScopeEnd, dfcx_moves);
                }
                LpExtend(..) => {}
            }
        }

        // Kill all assignments when the variable goes out of scope:
        for (assignment_index, assignment) in
                self.var_assignments.borrow().iter().enumerate() {
            let lp = self.path_loan_path(assignment.path);
            match lp.kind {
                LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
                    let kill_scope = lp.kill_scope(tcx);
                    dfcx_assign.add_kill(KillFrom::ScopeEnd,
                                         kill_scope.node_id(),
                                         assignment_index);
                }
                LpExtend(..) => {
                    // `var_assignments` should only ever contain var paths
                    // (see `add_assignment`), so this is an internal error.
                    tcx.sess.bug("var assignment for non var path");
                }
            }
        }
    }

    /// Invokes `f` on `index` and then each ancestor up to the root,
    /// stopping early (and returning false) if `f` returns false.
    fn each_base_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
    {
        let mut p = index;
        while p != InvalidMovePathIndex {
            if !f(p) {
                return false;
            }
            p = self.path_parent(p);
        }
        return true;
    }

    // FIXME(#19596) This is a workaround, but there should be better way to do this
    /// Recursive worker for `each_extending_path`: pre-order walk of
    /// `index` and the subtree of paths extending it.
    fn each_extending_path_<F>(&self, index: MovePathIndex, f: &mut F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
    {
        if !(*f)(index) {
            return false;
        }

        let mut p = self.path_first_child(index);
        while p != InvalidMovePathIndex {
            if !self.each_extending_path_(p, f) {
                return false;
            }
            p = self.path_next_sibling(p);
        }

        return true;
    }

    /// Invokes `f` on `index` and every path that extends it (its whole
    /// subtree), stopping early if `f` returns false.
    fn each_extending_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
    {
        self.each_extending_path_(index, &mut f)
    }

    /// Invokes `f` on every move of `index0` or of any path extending
    /// `index0`; returns false if `f` ever returned false.
    fn each_applicable_move<F>(&self, index0: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MoveIndex) -> bool,
    {
        let mut ret = true;
        self.each_extending_path(index0, |index| {
            // Walk the linked list of moves recorded against this path.
            let mut p = self.path_first_move(index);
            while p != InvalidMoveIndex {
                if !f(p) {
                    ret = false;
                    break;
                }
                p = self.move_next_move(p);
            }
            ret
        });
        ret
    }

    /// Registers kills (of kind `kill_kind`, at node `kill_id`) for every
    /// move of `path` or of any path extending it — but only when `path`
    /// names a unique location.
    fn kill_moves(&self,
                  path: MovePathIndex,
                  kill_id: ast::NodeId,
                  kill_kind: KillFrom,
                  dfcx_moves: &mut MoveDataFlow) {
        // We can only perform kills for paths that refer to a unique location,
        // since otherwise we may kill a move from one location with an
        // assignment referring to another location.

        let loan_path = self.path_loan_path(path);
        if loan_path_is_precise(&*loan_path) {
            self.each_applicable_move(path, |move_index| {
                debug!("kill_moves add_kill {:?} kill_id={} move_index={}",
                       kill_kind, kill_id, move_index.get());
                dfcx_moves.add_kill(kill_kind, kill_id, move_index.get());
                true
            });
        }
    }
}
594
impl<'a, 'tcx> FlowedMoveData<'a, 'tcx> {
    /// Builds the move and assignment dataflow results for `move_data`
    /// over the given CFG and function body, then bundles everything
    /// into a `FlowedMoveData`.
    pub fn new(move_data: MoveData<'tcx>,
               tcx: &'a ty::ctxt<'tcx>,
               cfg: &cfg::CFG,
               id_range: ast_util::IdRange,
               decl: &ast::FnDecl,
               body: &ast::Block)
               -> FlowedMoveData<'a, 'tcx> {
        // One dataflow bit per recorded move...
        let mut dfcx_moves =
            DataFlowContext::new(tcx,
                                 "flowed_move_data_moves",
                                 Some(decl),
                                 cfg,
                                 MoveDataFlowOperator,
                                 id_range,
                                 move_data.moves.borrow().len());
        // ...and one per recorded variable assignment.
        let mut dfcx_assign =
            DataFlowContext::new(tcx,
                                 "flowed_move_data_assigns",
                                 Some(decl),
                                 cfg,
                                 AssignDataFlowOperator,
                                 id_range,
                                 move_data.var_assignments.borrow().len());

        // Normalize fragment sets before computing gen/kill bits.
        move_data.fixup_fragment_sets(tcx);

        move_data.add_gen_kills(tcx,
                                &mut dfcx_moves,
                                &mut dfcx_assign);

        dfcx_moves.add_kills_from_flow_exits(cfg);
        dfcx_assign.add_kills_from_flow_exits(cfg);

        // Run both dataflow problems to a fixed point.
        dfcx_moves.propagate(cfg, body);
        dfcx_assign.propagate(cfg, body);

        FlowedMoveData {
            move_data: move_data,
            dfcx_moves: dfcx_moves,
            dfcx_assign: dfcx_assign,
        }
    }

    pub fn kind_of_move_of_path(&self,
                                id: ast::NodeId,
                                loan_path: &Rc<LoanPath<'tcx>>)
                                -> Option<MoveKind> {
        //! Returns the kind of a move of `loan_path` by `id`, if one exists.

        let mut ret = None;
        if let Some(loan_path_index) = self.move_data.path_map.borrow().get(&*loan_path) {
            // Scan the moves generated at `id` for one whose path matches.
            self.dfcx_moves.each_gen_bit(id, |move_index| {
                let the_move = self.move_data.moves.borrow();
                let the_move = (*the_move)[move_index];
                if the_move.path == *loan_path_index {
                    ret = Some(the_move.kind);
                    false // found it; stop iterating
                } else {
                    true
                }
            });
        }
        ret
    }

    /// Iterates through each move of `loan_path` (or some base path of `loan_path`) that *may*
    /// have occurred on entry to `id` without an intervening assignment. In other words, any moves
    /// that would invalidate a reference to `loan_path` at location `id`.
    pub fn each_move_of<F>(&self,
                           id: ast::NodeId,
                           loan_path: &Rc<LoanPath<'tcx>>,
                           mut f: F)
                           -> bool where
        F: FnMut(&Move, &LoanPath<'tcx>) -> bool,
    {
        // Bad scenarios:
        //
        // 1. Move of `a.b.c`, use of `a.b.c`
        // 2. Move of `a.b.c`, use of `a.b.c.d`
        // 3. Move of `a.b.c`, use of `a` or `a.b`
        //
        // OK scenario:
        //
        // 4. move of `a.b.c`, use of `a.b.d`

        let base_indices = self.move_data.existing_base_paths(loan_path);
        if base_indices.is_empty() {
            // No part of `loan_path` was ever interned, so nothing can
            // have been moved from it.
            return true;
        }

        let opt_loan_path_index = self.move_data.existing_move_path(loan_path);

        let mut ret = true;

        self.dfcx_moves.each_bit_on_entry(id, |index| {
            let the_move = self.move_data.moves.borrow();
            let the_move = &(*the_move)[index];
            let moved_path = the_move.path;
            if base_indices.iter().any(|x| x == &moved_path) {
                // Scenario 1 or 2: `loan_path` or some base path of
                // `loan_path` was moved.
                if !f(the_move, &*self.move_data.path_loan_path(moved_path)) {
                    ret = false;
                }
            } else {
                if let Some(loan_path_index) = opt_loan_path_index {
                    let cont = self.move_data.each_base_path(moved_path, |p| {
                        if p == loan_path_index {
                            // Scenario 3: some extension of `loan_path`
                            // was moved
                            f(the_move,
                              &*self.move_data.path_loan_path(moved_path))
                        } else {
                            true
                        }
                    });
                    if !cont { ret = false; }
                }
            }
            ret
        })
    }

    /// Iterates through every assignment to `loan_path` that may have occurred on entry to `id`.
    /// `loan_path` must be a single variable.
    pub fn each_assignment_of<F>(&self,
                                 id: ast::NodeId,
                                 loan_path: &Rc<LoanPath<'tcx>>,
                                 mut f: F)
                                 -> bool where
        F: FnMut(&Assignment) -> bool,
    {
        let loan_path_index = {
            match self.move_data.existing_move_path(loan_path) {
                Some(i) => i,
                None => {
                    // if there were any assignments, it'd have an index
                    return true;
                }
            }
        };

        self.dfcx_assign.each_bit_on_entry(id, |index| {
            let assignment = self.move_data.var_assignments.borrow();
            let assignment = &(*assignment)[index];
            // Only stop if `f` rejects an assignment to this exact path.
            if assignment.path == loan_path_index && !f(assignment) {
                false
            } else {
                true
            }
        })
    }
}
749
impl BitwiseOperator for MoveDataFlowOperator {
    /// Union join: a bit is set after a merge point if it was set on
    /// *any* predecessor ("may" analysis).
    #[inline]
    fn join(&self, succ: usize, pred: usize) -> usize {
        succ | pred // moves from both preds are in scope
    }
}
756
impl DataFlowOperator for MoveDataFlowOperator {
    #[inline]
    fn initial_value(&self) -> bool {
        // (Comment fixed: these are move bits, not loan bits.)
        false // no moves in scope by default
    }
}
763
impl BitwiseOperator for AssignDataFlowOperator {
    /// Union join: a bit is set after a merge point if it was set on
    /// *any* predecessor ("may" analysis).
    #[inline]
    fn join(&self, succ: usize, pred: usize) -> usize {
        // (Comment fixed: these are assignment bits, not move bits.)
        succ | pred // assignments from both preds are in scope
    }
}
770
impl DataFlowOperator for AssignDataFlowOperator {
    /// Bits start unset: no assignment has happened on entry.
    #[inline]
    fn initial_value(&self) -> bool {
        false // no assignments in scope by default
    }
}