]> git.proxmox.com Git - rustc.git/blame - src/librustc_borrowck/borrowck/move_data.rs
Imported Upstream version 1.9.0+dfsg1
[rustc.git] / src / librustc_borrowck / borrowck / move_data.rs
CommitLineData
1a4d82fc
JJ
1// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2// file at the top-level directory of this distribution and at
3// http://rust-lang.org/COPYRIGHT.
4//
5// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8// option. This file may not be copied, modified, or distributed
9// except according to those terms.
10
11//! Data structures used for tracking moves. Please see the extensive
c34b1796 12//! comments in the section "Moves and initialization" in `README.md`.
1a4d82fc
JJ
13
14pub use self::MoveKind::*;
15
16use borrowck::*;
54a0048b 17use rustc::cfg;
1a4d82fc
JJ
18use rustc::middle::dataflow::DataFlowContext;
19use rustc::middle::dataflow::BitwiseOperator;
20use rustc::middle::dataflow::DataFlowOperator;
9346a6ac 21use rustc::middle::dataflow::KillFrom;
1a4d82fc 22use rustc::middle::expr_use_visitor as euv;
9cc50fc6 23use rustc::middle::expr_use_visitor::MutateMode;
54a0048b 24use rustc::ty::TyCtxt;
1a4d82fc 25use rustc::util::nodemap::{FnvHashMap, NodeSet};
62682a34 26
1a4d82fc
JJ
27use std::cell::RefCell;
28use std::rc::Rc;
85aaf69f 29use std::usize;
1a4d82fc 30use syntax::ast;
1a4d82fc 31use syntax::codemap::Span;
54a0048b
SL
32use rustc::hir;
33use rustc::hir::intravisit::IdRange;
1a4d82fc
JJ
34
// Submodule tracking path-fragments (the pieces left behind / carved out when
// part of a structured value is moved); implementation lives in `fragments.rs`.
#[path="fragments.rs"]
pub mod fragments;
37
/// Central table of moves, assignments and pattern matches collected while
/// walking one fn body. All tables use `RefCell` interior mutability because
/// the collection methods take `&self` and update several tables at once.
pub struct MoveData<'tcx> {
    /// Move paths. See section "Move paths" in `README.md`.
    /// Indexed by `MovePathIndex`; entries form a tree via parent/child/sibling links.
    pub paths: RefCell<Vec<MovePath<'tcx>>>,

    /// Cache of loan path to move path index, for easy lookup.
    pub path_map: RefCell<FnvHashMap<Rc<LoanPath<'tcx>>, MovePathIndex>>,

    /// Each move or uninitialized variable gets an entry here.
    /// Indexed by `MoveIndex`.
    pub moves: RefCell<Vec<Move>>,

    /// Assignments to a variable, like `x = foo`. These are assigned
    /// bits for dataflow, since we must track them to ensure that
    /// immutable variables are assigned at most once along each path.
    pub var_assignments: RefCell<Vec<Assignment>>,

    /// Assignments to a path, like `x.f = foo`. These are not
    /// assigned dataflow bits, but we track them because they still
    /// kill move bits.
    pub path_assignments: RefCell<Vec<Assignment>>,

    /// Enum variant matched within a pattern on some match arm, like
    /// `SomeStruct{ f: Variant1(x, y) } => ...`
    pub variant_matches: RefCell<Vec<VariantMatch>>,

    /// Assignments to a variable or path, like `x = foo`, but not `x += foo`.
    pub assignee_ids: RefCell<NodeSet>,

    /// Path-fragments from moves in to or out of parts of structured data.
    pub fragments: RefCell<fragments::FragmentSets>,
}
68
/// A `MoveData` after dataflow propagation: pairs the raw tables with the
/// two propagated dataflow contexts so queries like "may `lp` have been
/// moved on entry to `id`?" can be answered.
pub struct FlowedMoveData<'a, 'tcx: 'a> {
    pub move_data: MoveData<'tcx>,

    /// Dataflow over `move_data.moves` bits.
    pub dfcx_moves: MoveDataFlow<'a, 'tcx>,

    // We could (and maybe should, for efficiency) combine both move
    // and assign data flow into one, but this way it's easier to
    // distinguish the bits that correspond to moves and assignments.
    pub dfcx_assign: AssignDataFlow<'a, 'tcx>
}
79
/// Index into `MoveData.paths`, used like a pointer
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct MovePathIndex(usize);

impl MovePathIndex {
    /// Returns the raw `usize` position within `MoveData.paths`.
    fn get(&self) -> usize {
        self.0
    }
}

/// Sentinel value meaning "no move path": used for the parent link of a
/// root path and to terminate the child/sibling linked lists.
#[allow(non_upper_case_globals)]
const InvalidMovePathIndex: MovePathIndex = MovePathIndex(usize::MAX);
98
/// Index into `MoveData.moves`, used like a pointer
#[derive(Copy, Clone, PartialEq)]
pub struct MoveIndex(usize);

impl MoveIndex {
    /// Returns the raw `usize` position within `MoveData.moves`.
    fn get(&self) -> usize {
        self.0
    }
}

/// Sentinel value meaning "no move": terminates the per-path linked
/// lists of moves threaded through `Move::next_move`.
#[allow(non_upper_case_globals)]
const InvalidMoveIndex: MoveIndex = MoveIndex(usize::MAX);
111
/// One node in the tree of move paths. Children and moves are kept as
/// intrusive singly-linked lists threaded through sibling/move indices.
pub struct MovePath<'tcx> {
    /// Loan path corresponding to this move path
    pub loan_path: Rc<LoanPath<'tcx>>,

    /// Parent pointer, `InvalidMovePathIndex` if root
    pub parent: MovePathIndex,

    /// Head of linked list of moves to this path,
    /// `InvalidMoveIndex` if not moved
    pub first_move: MoveIndex,

    /// First node in linked list of children, `InvalidMovePathIndex` if leaf
    pub first_child: MovePathIndex,

    /// Next node in linked list of parent's children (siblings),
    /// `InvalidMovePathIndex` if none.
    pub next_sibling: MovePathIndex,
}
130
/// Why a path is considered moved; recorded so error messages can say
/// what kind of event consumed the value.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum MoveKind {
    Declared,   // When declared, variables start out "moved".
    MoveExpr,   // Expression or binding that moves a variable
    MovePat,    // By-move binding
    Captured    // Closure creation that moves a value
}
138
/// One recorded move of a path; element of `MoveData.moves`.
#[derive(Copy, Clone)]
pub struct Move {
    /// Path being moved.
    pub path: MovePathIndex,

    /// id of node that is doing the move.
    pub id: ast::NodeId,

    /// Kind of move, for error messages.
    pub kind: MoveKind,

    /// Next node in linked list of moves from `path`, or `InvalidMoveIndex`
    pub next_move: MoveIndex
}
153
/// One recorded assignment; element of `MoveData.var_assignments` or
/// `MoveData.path_assignments` depending on whether the target is a
/// whole variable or a sub-path.
#[derive(Copy, Clone)]
pub struct Assignment {
    /// Path being assigned.
    pub path: MovePathIndex,

    /// id where assignment occurs
    pub id: ast::NodeId,

    /// span of node where assignment occurs
    pub span: Span,

    /// id for l-value expression on lhs of assignment
    pub assignee_id: ast::NodeId,
}
168
/// One recorded enum-variant downcast in a match pattern; element of
/// `MoveData.variant_matches`.
#[derive(Copy, Clone)]
pub struct VariantMatch {
    /// downcast to the variant.
    pub path: MovePathIndex,

    /// path being downcast to the variant.
    pub base_path: MovePathIndex,

    /// id where variant's pattern occurs
    pub id: ast::NodeId,

    /// says if variant established by move (and why), by copy, or by borrow.
    pub mode: euv::MatchMode
}
183
/// Join/initial-value operator for the "moves" dataflow (see trait impls below).
#[derive(Clone, Copy)]
pub struct MoveDataFlowOperator;

/// Dataflow context whose bits correspond to entries of `MoveData.moves`.
pub type MoveDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, MoveDataFlowOperator>;

/// Join/initial-value operator for the "assignments" dataflow (see trait impls below).
#[derive(Clone, Copy)]
pub struct AssignDataFlowOperator;

/// Dataflow context whose bits correspond to entries of `MoveData.var_assignments`.
pub type AssignDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, AssignDataFlowOperator>;
193
194fn loan_path_is_precise(loan_path: &LoanPath) -> bool {
195 match loan_path.kind {
196 LpVar(_) | LpUpvar(_) => {
197 true
198 }
9cc50fc6 199 LpExtend(_, _, LpInterior(_, InteriorKind::InteriorElement(..))) => {
85aaf69f 200 // Paths involving element accesses a[i] do not refer to a unique
1a4d82fc 201 // location, as there is no accurate tracking of the indices.
85aaf69f
SL
202 //
203 // (Paths involving element accesses via slice pattern bindings
204 // can in principle be tracked precisely, but that is future
205 // work. For now, continue claiming that they are imprecise.)
1a4d82fc
JJ
206 false
207 }
208 LpDowncast(ref lp_base, _) |
209 LpExtend(ref lp_base, _, _) => {
7453a54e 210 loan_path_is_precise(&lp_base)
1a4d82fc
JJ
211 }
212 }
213}
214
impl<'tcx> MoveData<'tcx> {
    /// Creates a `MoveData` with every table empty.
    pub fn new() -> MoveData<'tcx> {
        MoveData {
            paths: RefCell::new(Vec::new()),
            path_map: RefCell::new(FnvHashMap()),
            moves: RefCell::new(Vec::new()),
            path_assignments: RefCell::new(Vec::new()),
            var_assignments: RefCell::new(Vec::new()),
            variant_matches: RefCell::new(Vec::new()),
            assignee_ids: RefCell::new(NodeSet()),
            fragments: RefCell::new(fragments::FragmentSets::new()),
        }
    }

    /// Returns (a clone of the `Rc` to) the loan path stored at `index`.
    pub fn path_loan_path(&self, index: MovePathIndex) -> Rc<LoanPath<'tcx>> {
        (*self.paths.borrow())[index.get()].loan_path.clone()
    }

    /// Parent of `index`, or `InvalidMovePathIndex` if `index` is a root.
    fn path_parent(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].parent
    }

    /// Head of the linked list of moves from `index`
    /// (`InvalidMoveIndex` if the path was never moved).
    fn path_first_move(&self, index: MovePathIndex) -> MoveIndex {
        (*self.paths.borrow())[index.get()].first_move
    }

    /// Returns the index of first child, or `InvalidMovePathIndex` if
    /// `index` is leaf.
    fn path_first_child(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].first_child
    }

    /// Next sibling in the parent's child list
    /// (`InvalidMovePathIndex` if none).
    fn path_next_sibling(&self, index: MovePathIndex) -> MovePathIndex {
        (*self.paths.borrow())[index.get()].next_sibling
    }

    /// Overwrites the head of `index`'s move list (used when prepending).
    fn set_path_first_move(&self,
                           index: MovePathIndex,
                           first_move: MoveIndex) {
        (*self.paths.borrow_mut())[index.get()].first_move = first_move
    }

    /// Overwrites the head of `index`'s child list (used when prepending).
    fn set_path_first_child(&self,
                            index: MovePathIndex,
                            first_child: MovePathIndex) {
        (*self.paths.borrow_mut())[index.get()].first_child = first_child
    }

    fn move_next_move(&self, index: MoveIndex) -> MoveIndex {
        //! Type safe indexing operator
        (*self.moves.borrow())[index.get()].next_move
    }

    fn is_var_path(&self, index: MovePathIndex) -> bool {
        //! True if `index` refers to a variable
        // Only root paths (vars/upvars) have no parent; see `move_path` below.
        self.path_parent(index) == InvalidMovePathIndex
    }

    /// Returns the existing move path index for `lp`, if any, and otherwise adds a new index for
    /// `lp` and any of its base paths that do not yet have an index.
    pub fn move_path(&self,
                     tcx: &TyCtxt<'tcx>,
                     lp: Rc<LoanPath<'tcx>>) -> MovePathIndex {
        // Fast path: already interned.
        match self.path_map.borrow().get(&lp) {
            Some(&index) => {
                return index;
            }
            None => {}
        }

        let index = match lp.kind {
            LpVar(..) | LpUpvar(..) => {
                // Root path: no parent, no siblings.
                let index = MovePathIndex(self.paths.borrow().len());

                self.paths.borrow_mut().push(MovePath {
                    loan_path: lp.clone(),
                    parent: InvalidMovePathIndex,
                    first_move: InvalidMoveIndex,
                    first_child: InvalidMovePathIndex,
                    next_sibling: InvalidMovePathIndex,
                });

                index
            }

            LpDowncast(ref base, _) |
            LpExtend(ref base, _, _) => {
                // Recursively intern the base path first, then prepend
                // the new node onto the parent's child list.
                let parent_index = self.move_path(tcx, base.clone());

                let index = MovePathIndex(self.paths.borrow().len());

                let next_sibling = self.path_first_child(parent_index);
                self.set_path_first_child(parent_index, index);

                self.paths.borrow_mut().push(MovePath {
                    loan_path: lp.clone(),
                    parent: parent_index,
                    first_move: InvalidMoveIndex,
                    first_child: InvalidMovePathIndex,
                    next_sibling: next_sibling,
                });

                index
            }
        };

        debug!("move_path(lp={:?}, index={:?})",
               lp,
               index);

        // The new entry must be the last one pushed.
        assert_eq!(index.get(), self.paths.borrow().len() - 1);
        self.path_map.borrow_mut().insert(lp, index);
        return index;
    }

    /// Looks up `lp` without creating it; `None` if never interned.
    fn existing_move_path(&self, lp: &Rc<LoanPath<'tcx>>)
                          -> Option<MovePathIndex> {
        self.path_map.borrow().get(lp).cloned()
    }

    /// Collects the existing indices for `lp` and all its base paths.
    fn existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>)
                           -> Vec<MovePathIndex> {
        let mut result = vec!();
        self.add_existing_base_paths(lp, &mut result);
        result
    }

    /// Adds any existing move path indices for `lp` and any base paths of `lp` to `result`, but
    /// does not add new move paths
    fn add_existing_base_paths(&self, lp: &Rc<LoanPath<'tcx>>,
                               result: &mut Vec<MovePathIndex>) {
        match self.path_map.borrow().get(lp).cloned() {
            Some(index) => {
                // `lp` itself is interned: walk its parent chain directly.
                self.each_base_path(index, |p| {
                    result.push(p);
                    true
                });
            }
            None => {
                // `lp` not interned: recurse on its base, if it has one.
                match lp.kind {
                    LpVar(..) | LpUpvar(..) => { }
                    LpDowncast(ref b, _) |
                    LpExtend(ref b, _, _) => {
                        self.add_existing_base_paths(b, result);
                    }
                }
            }
        }

    }

    /// Adds a new move entry for a move of `lp` that occurs at location `id` with kind `kind`.
    pub fn add_move(&self,
                    tcx: &TyCtxt<'tcx>,
                    lp: Rc<LoanPath<'tcx>>,
                    id: ast::NodeId,
                    kind: MoveKind) {
        debug!("add_move(lp={:?}, id={}, kind={:?})",
               lp,
               id,
               kind);

        let path_index = self.move_path(tcx, lp.clone());
        let move_index = MoveIndex(self.moves.borrow().len());

        self.fragments.borrow_mut().add_move(path_index);

        // Prepend the new move onto the path's move list.
        let next_move = self.path_first_move(path_index);
        self.set_path_first_move(path_index, move_index);

        self.moves.borrow_mut().push(Move {
            path: path_index,
            id: id,
            kind: kind,
            next_move: next_move
        });
    }

    /// Adds a new record for an assignment to `lp` that occurs at location `id` with the given
    /// `span`.
    pub fn add_assignment(&self,
                          tcx: &TyCtxt<'tcx>,
                          lp: Rc<LoanPath<'tcx>>,
                          assign_id: ast::NodeId,
                          span: Span,
                          assignee_id: ast::NodeId,
                          mode: euv::MutateMode) {
        debug!("add_assignment(lp={:?}, assign_id={}, assignee_id={}",
               lp, assign_id, assignee_id);

        let path_index = self.move_path(tcx, lp.clone());

        self.fragments.borrow_mut().add_assignment(path_index);

        match mode {
            // `x = foo` (not `x += foo`) counts as a pure assignment.
            MutateMode::Init | MutateMode::JustWrite => {
                self.assignee_ids.borrow_mut().insert(assignee_id);
            }
            MutateMode::WriteAndRead => { }
        }

        let assignment = Assignment {
            path: path_index,
            id: assign_id,
            span: span,
            assignee_id: assignee_id,
        };

        // Variable assignments get dataflow bits; sub-path assignments
        // are tracked only so they can kill move bits (see struct docs).
        if self.is_var_path(path_index) {
            debug!("add_assignment[var](lp={:?}, assignment={}, path_index={:?})",
                   lp, self.var_assignments.borrow().len(), path_index);

            self.var_assignments.borrow_mut().push(assignment);
        } else {
            debug!("add_assignment[path](lp={:?}, path_index={:?})",
                   lp, path_index);

            self.path_assignments.borrow_mut().push(assignment);
        }
    }

    /// Adds a new record for a match of `base_lp`, downcast to
    /// variant `lp`, that occurs at location `pattern_id`. (One
    /// should be able to recover the span info from the
    /// `pattern_id` and the ast_map, I think.)
    pub fn add_variant_match(&self,
                             tcx: &TyCtxt<'tcx>,
                             lp: Rc<LoanPath<'tcx>>,
                             pattern_id: ast::NodeId,
                             base_lp: Rc<LoanPath<'tcx>>,
                             mode: euv::MatchMode) {
        debug!("add_variant_match(lp={:?}, pattern_id={})",
               lp, pattern_id);

        let path_index = self.move_path(tcx, lp.clone());
        let base_path_index = self.move_path(tcx, base_lp.clone());

        self.fragments.borrow_mut().add_assignment(path_index);

        let variant_match = VariantMatch {
            path: path_index,
            base_path: base_path_index,
            id: pattern_id,
            mode: mode,
        };

        self.variant_matches.borrow_mut().push(variant_match);
    }

    /// Delegates to `fragments::fixup_fragment_sets`; must run before
    /// `add_gen_kills` (see `FlowedMoveData::new`).
    fn fixup_fragment_sets(&self, tcx: &TyCtxt<'tcx>) {
        fragments::fixup_fragment_sets(self, tcx)
    }

    /// Adds the gen/kills for the various moves and
    /// assignments into the provided data flow contexts.
    /// Moves are generated by moves and killed by assignments and
    /// scoping. Assignments are generated by assignment to variables and
    /// killed by scoping. See `README.md` for more details.
    fn add_gen_kills(&self,
                     tcx: &TyCtxt<'tcx>,
                     dfcx_moves: &mut MoveDataFlow,
                     dfcx_assign: &mut AssignDataFlow) {
        // Each move generates its own bit at the node performing it.
        for (i, the_move) in self.moves.borrow().iter().enumerate() {
            dfcx_moves.add_gen(the_move.id, i);
        }

        // Variable assignments generate an assign bit and kill prior moves.
        for (i, assignment) in self.var_assignments.borrow().iter().enumerate() {
            dfcx_assign.add_gen(assignment.id, i);
            self.kill_moves(assignment.path, assignment.id,
                            KillFrom::Execution, dfcx_moves);
        }

        // Sub-path assignments have no bit of their own but still kill moves.
        for assignment in self.path_assignments.borrow().iter() {
            self.kill_moves(assignment.path, assignment.id,
                            KillFrom::Execution, dfcx_moves);
        }

        // Kill all moves related to a variable `x` when
        // it goes out of scope:
        for path in self.paths.borrow().iter() {
            match path.loan_path.kind {
                LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
                    let kill_scope = path.loan_path.kill_scope(tcx);
                    let path = *self.path_map.borrow().get(&path.loan_path).unwrap();
                    self.kill_moves(path, kill_scope.node_id(&tcx.region_maps),
                                    KillFrom::ScopeEnd, dfcx_moves);
                }
                LpExtend(..) => {}
            }
        }

        // Kill all assignments when the variable goes out of scope:
        for (assignment_index, assignment) in
                self.var_assignments.borrow().iter().enumerate() {
            let lp = self.path_loan_path(assignment.path);
            match lp.kind {
                LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
                    let kill_scope = lp.kill_scope(tcx);
                    dfcx_assign.add_kill(KillFrom::ScopeEnd,
                                         kill_scope.node_id(&tcx.region_maps),
                                         assignment_index);
                }
                LpExtend(..) => {
                    // var_assignments should only ever contain variable paths.
                    bug!("var assignment for non var path");
                }
            }
        }
    }

    /// Invokes `f` on `index` and then on each ancestor up the parent
    /// chain; stops (returning `false`) as soon as `f` returns `false`.
    fn each_base_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
    {
        let mut p = index;
        while p != InvalidMovePathIndex {
            if !f(p) {
                return false;
            }
            p = self.path_parent(p);
        }
        return true;
    }

    // FIXME(#19596) This is a workaround, but there should be better way to do this
    // Recursive worker for `each_extending_path`: pre-order walk over
    // `index` and all its descendants, short-circuiting on `false`.
    fn each_extending_path_<F>(&self, index: MovePathIndex, f: &mut F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
    {
        if !(*f)(index) {
            return false;
        }

        let mut p = self.path_first_child(index);
        while p != InvalidMovePathIndex {
            if !self.each_extending_path_(p, f) {
                return false;
            }
            p = self.path_next_sibling(p);
        }

        return true;
    }

    /// Invokes `f` on `index` and every path extending it (its subtree).
    fn each_extending_path<F>(&self, index: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MovePathIndex) -> bool,
    {
        self.each_extending_path_(index, &mut f)
    }

    /// Invokes `f` on every move out of `index0` or any path extending
    /// it; returns `false` if `f` ever returned `false`.
    fn each_applicable_move<F>(&self, index0: MovePathIndex, mut f: F) -> bool where
        F: FnMut(MoveIndex) -> bool,
    {
        let mut ret = true;
        self.each_extending_path(index0, |index| {
            // Walk this path's move list.
            let mut p = self.path_first_move(index);
            while p != InvalidMoveIndex {
                if !f(p) {
                    ret = false;
                    break;
                }
                p = self.move_next_move(p);
            }
            ret
        });
        ret
    }

    /// Registers kills at `kill_id` for every move applicable to `path`,
    /// but only when the path is precise (see `loan_path_is_precise`).
    fn kill_moves(&self,
                  path: MovePathIndex,
                  kill_id: ast::NodeId,
                  kill_kind: KillFrom,
                  dfcx_moves: &mut MoveDataFlow) {
        // We can only perform kills for paths that refer to a unique location,
        // since otherwise we may kill a move from one location with an
        // assignment referring to another location.

        let loan_path = self.path_loan_path(path);
        if loan_path_is_precise(&loan_path) {
            self.each_applicable_move(path, |move_index| {
                debug!("kill_moves add_kill {:?} kill_id={} move_index={}",
                       kill_kind, kill_id, move_index.get());
                dfcx_moves.add_kill(kill_kind, kill_id, move_index.get());
                true
            });
        }
    }
}
600
impl<'a, 'tcx> FlowedMoveData<'a, 'tcx> {
    /// Builds both dataflow contexts for one fn body and propagates them.
    /// Order matters: fragments are fixed up before gen/kills are added,
    /// and flow-exit kills are added before propagation.
    pub fn new(move_data: MoveData<'tcx>,
               tcx: &'a TyCtxt<'tcx>,
               cfg: &cfg::CFG,
               id_range: IdRange,
               decl: &hir::FnDecl,
               body: &hir::Block)
               -> FlowedMoveData<'a, 'tcx> {
        // One dataflow bit per recorded move...
        let mut dfcx_moves =
            DataFlowContext::new(tcx,
                                 "flowed_move_data_moves",
                                 Some(decl),
                                 cfg,
                                 MoveDataFlowOperator,
                                 id_range,
                                 move_data.moves.borrow().len());
        // ...and one per recorded variable assignment.
        let mut dfcx_assign =
            DataFlowContext::new(tcx,
                                 "flowed_move_data_assigns",
                                 Some(decl),
                                 cfg,
                                 AssignDataFlowOperator,
                                 id_range,
                                 move_data.var_assignments.borrow().len());

        move_data.fixup_fragment_sets(tcx);

        move_data.add_gen_kills(tcx,
                                &mut dfcx_moves,
                                &mut dfcx_assign);

        dfcx_moves.add_kills_from_flow_exits(cfg);
        dfcx_assign.add_kills_from_flow_exits(cfg);

        dfcx_moves.propagate(cfg, body);
        dfcx_assign.propagate(cfg, body);

        FlowedMoveData {
            move_data: move_data,
            dfcx_moves: dfcx_moves,
            dfcx_assign: dfcx_assign,
        }
    }

    pub fn kind_of_move_of_path(&self,
                                id: ast::NodeId,
                                loan_path: &Rc<LoanPath<'tcx>>)
                                -> Option<MoveKind> {
        //! Returns the kind of a move of `loan_path` by `id`, if one exists.

        let mut ret = None;
        if let Some(loan_path_index) = self.move_data.path_map.borrow().get(&*loan_path) {
            // Scan only the moves *generated at* `id` itself.
            self.dfcx_moves.each_gen_bit(id, |move_index| {
                let the_move = self.move_data.moves.borrow();
                let the_move = (*the_move)[move_index];
                if the_move.path == *loan_path_index {
                    ret = Some(the_move.kind);
                    false // found it; stop iterating
                } else {
                    true
                }
            });
        }
        ret
    }

    /// Iterates through each move of `loan_path` (or some base path of `loan_path`) that *may*
    /// have occurred on entry to `id` without an intervening assignment. In other words, any moves
    /// that would invalidate a reference to `loan_path` at location `id`.
    pub fn each_move_of<F>(&self,
                           id: ast::NodeId,
                           loan_path: &Rc<LoanPath<'tcx>>,
                           mut f: F)
                           -> bool where
        F: FnMut(&Move, &LoanPath<'tcx>) -> bool,
    {
        // Bad scenarios:
        //
        // 1. Move of `a.b.c`, use of `a.b.c`
        // 2. Move of `a.b.c`, use of `a.b.c.d`
        // 3. Move of `a.b.c`, use of `a` or `a.b`
        //
        // OK scenario:
        //
        // 4. move of `a.b.c`, use of `a.b.d`

        let base_indices = self.move_data.existing_base_paths(loan_path);
        if base_indices.is_empty() {
            // Neither `loan_path` nor any base of it was ever moved.
            return true;
        }

        let opt_loan_path_index = self.move_data.existing_move_path(loan_path);

        let mut ret = true;

        self.dfcx_moves.each_bit_on_entry(id, |index| {
            let the_move = self.move_data.moves.borrow();
            let the_move = &(*the_move)[index];
            let moved_path = the_move.path;
            if base_indices.iter().any(|x| x == &moved_path) {
                // Scenario 1 or 2: `loan_path` or some base path of
                // `loan_path` was moved.
                if !f(the_move, &self.move_data.path_loan_path(moved_path)) {
                    ret = false;
                }
            } else {
                if let Some(loan_path_index) = opt_loan_path_index {
                    let cont = self.move_data.each_base_path(moved_path, |p| {
                        if p == loan_path_index {
                            // Scenario 3: some extension of `loan_path`
                            // was moved
                            f(the_move,
                              &self.move_data.path_loan_path(moved_path))
                        } else {
                            true
                        }
                    });
                    if !cont { ret = false; }
                }
            }
            ret
        })
    }

    /// Iterates through every assignment to `loan_path` that may have occurred on entry to `id`.
    /// `loan_path` must be a single variable.
    pub fn each_assignment_of<F>(&self,
                                 id: ast::NodeId,
                                 loan_path: &Rc<LoanPath<'tcx>>,
                                 mut f: F)
                                 -> bool where
        F: FnMut(&Assignment) -> bool,
    {
        let loan_path_index = {
            match self.move_data.existing_move_path(loan_path) {
                Some(i) => i,
                None => {
                    // if there were any assignments, it'd have an index
                    return true;
                }
            }
        };

        self.dfcx_assign.each_bit_on_entry(id, |index| {
            let assignment = self.move_data.var_assignments.borrow();
            let assignment = &(*assignment)[index];
            if assignment.path == loan_path_index && !f(assignment) {
                false
            } else {
                true
            }
        })
    }
}
755
impl BitwiseOperator for MoveDataFlowOperator {
    /// Union at join points: a move counts if it happened on *any* predecessor.
    #[inline]
    fn join(&self, succ: usize, pred: usize) -> usize {
        succ | pred // moves from both preds are in scope
    }
}
762
impl DataFlowOperator for MoveDataFlowOperator {
    /// Entry state: no move bits set.
    #[inline]
    fn initial_value(&self) -> bool {
        false // no loans in scope by default
    }
}
769
impl BitwiseOperator for AssignDataFlowOperator {
    /// Union at join points: an assignment counts if it happened on *any* predecessor.
    #[inline]
    fn join(&self, succ: usize, pred: usize) -> usize {
        succ | pred // assignments from both preds are in scope
    }
}
776
impl DataFlowOperator for AssignDataFlowOperator {
    /// Entry state: no assignment bits set.
    #[inline]
    fn initial_value(&self) -> bool {
        false // no assignments in scope by default
    }
}