// Extraction artifact: gitweb page header for
// git.proxmox.com Git - rustc.git/blob - src/librustc_borrowck/borrowck/move_data.rs
// at commit b38915612c5b0c453d83b30148a024e473acd553
1 // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! Data structures used for tracking moves. Please see the extensive
12 //! comments in the section "Moves and initialization" in `README.md`.
14 pub use self::MoveKind
::*;
17 use rustc
::middle
::cfg
;
18 use rustc
::middle
::dataflow
::DataFlowContext
;
19 use rustc
::middle
::dataflow
::BitwiseOperator
;
20 use rustc
::middle
::dataflow
::DataFlowOperator
;
21 use rustc
::middle
::dataflow
::KillFrom
;
22 use rustc
::middle
::expr_use_visitor
as euv
;
23 use rustc
::middle
::ty
;
24 use rustc
::util
::nodemap
::{FnvHashMap, NodeSet}
;
26 use std
::cell
::RefCell
;
31 use syntax
::codemap
::Span
;
33 #[path="fragments.rs"]
36 pub struct MoveData
<'tcx
> {
37 /// Move paths. See section "Move paths" in `README.md`.
38 pub paths
: RefCell
<Vec
<MovePath
<'tcx
>>>,
40 /// Cache of loan path to move path index, for easy lookup.
41 pub path_map
: RefCell
<FnvHashMap
<Rc
<LoanPath
<'tcx
>>, MovePathIndex
>>,
43 /// Each move or uninitialized variable gets an entry here.
44 pub moves
: RefCell
<Vec
<Move
>>,
46 /// Assignments to a variable, like `x = foo`. These are assigned
47 /// bits for dataflow, since we must track them to ensure that
48 /// immutable variables are assigned at most once along each path.
49 pub var_assignments
: RefCell
<Vec
<Assignment
>>,
51 /// Assignments to a path, like `x.f = foo`. These are not
52 /// assigned dataflow bits, but we track them because they still
54 pub path_assignments
: RefCell
<Vec
<Assignment
>>,
56 /// Enum variant matched within a pattern on some match arm, like
57 /// `SomeStruct{ f: Variant1(x, y) } => ...`
58 pub variant_matches
: RefCell
<Vec
<VariantMatch
>>,
60 /// Assignments to a variable or path, like `x = foo`, but not `x += foo`.
61 pub assignee_ids
: RefCell
<NodeSet
>,
63 /// Path-fragments from moves in to or out of parts of structured data.
64 pub fragments
: RefCell
<fragments
::FragmentSets
>,
67 pub struct FlowedMoveData
<'a
, 'tcx
: 'a
> {
68 pub move_data
: MoveData
<'tcx
>,
70 pub dfcx_moves
: MoveDataFlow
<'a
, 'tcx
>,
72 // We could (and maybe should, for efficiency) combine both move
73 // and assign data flow into one, but this way it's easier to
74 // distinguish the bits that correspond to moves and assignments.
75 pub dfcx_assign
: AssignDataFlow
<'a
, 'tcx
>
78 /// Index into `MoveData.paths`, used like a pointer
// Newtype wrapper over a raw `usize` vector index, giving type-safe indexing
// into the paths table. NOTE(review): `Clone` is intentionally absent from the
// derive list — a manual `impl Clone for MovePathIndex` appears further down
// in this file.
79 #[derive(Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
80 pub struct MovePathIndex(usize);
83 fn get(&self) -> usize {
84 let MovePathIndex(v
) = *self; v
88 impl Clone
for MovePathIndex
{
89 fn clone(&self) -> MovePathIndex
{
90 MovePathIndex(self.get())
// Sentinel "null" value for `MovePathIndex`: `usize::MAX` marks the absence of
// a link in the intrusive linked-list fields of `MovePath` (the field docs
// above say e.g. "Parent pointer, `InvalidMovePathIndex` if root").
94 #[allow(non_upper_case_globals)]
95 const InvalidMovePathIndex
: MovePathIndex
= MovePathIndex(usize::MAX
);
97 /// Index into `MoveData.moves`, used like a pointer
// Type-safe newtype index into the moves table. NOTE(review): unlike
// `MovePathIndex`, this one derives `Clone` directly rather than via a
// manual impl.
98 #[derive(Copy, Clone, PartialEq)]
99 pub struct MoveIndex(usize);
102 fn get(&self) -> usize {
103 let MoveIndex(v
) = *self; v
// Sentinel "null" `MoveIndex`: `usize::MAX` terminates the per-path linked
// list of moves (the `first_move`/`next_move` fields documented as
// "`InvalidMoveIndex` if not moved").
107 #[allow(non_upper_case_globals)]
108 const InvalidMoveIndex
: MoveIndex
= MoveIndex(usize::MAX
);
110 pub struct MovePath
<'tcx
> {
111 /// Loan path corresponding to this move path
112 pub loan_path
: Rc
<LoanPath
<'tcx
>>,
114 /// Parent pointer, `InvalidMovePathIndex` if root
115 pub parent
: MovePathIndex
,
117 /// Head of linked list of moves to this path,
118 /// `InvalidMoveIndex` if not moved
119 pub first_move
: MoveIndex
,
121 /// First node in linked list of children, `InvalidMovePathIndex` if leaf
122 pub first_child
: MovePathIndex
,
124 /// Next node in linked list of parent's children (siblings),
125 /// `InvalidMovePathIndex` if none.
126 pub next_sibling
: MovePathIndex
,
129 #[derive(Copy, Clone, PartialEq, Debug)]
131 Declared
, // When declared, variables start out "moved".
132 MoveExpr
, // Expression or binding that moves a variable
133 MovePat
, // By-move binding
134 Captured
// Closure creation that moves a value
137 #[derive(Copy, Clone)]
139 /// Path being moved.
140 pub path
: MovePathIndex
,
142 /// id of node that is doing the move.
145 /// Kind of move, for error messages.
148 /// Next node in linked list of moves from `path`, or `InvalidMoveIndex`
149 pub next_move
: MoveIndex
152 #[derive(Copy, Clone)]
153 pub struct Assignment
{
154 /// Path being assigned.
155 pub path
: MovePathIndex
,
157 /// id where assignment occurs
160 /// span of node where assignment occurs
164 #[derive(Copy, Clone)]
165 pub struct VariantMatch
{
166 /// downcast to the variant.
167 pub path
: MovePathIndex
,
169 /// path being downcast to the variant.
170 pub base_path
: MovePathIndex
,
172 /// id where variant's pattern occurs
175 /// says if variant established by move (and why), by copy, or by borrow.
176 pub mode
: euv
::MatchMode
// Marker type selecting the dataflow behavior for the "moves" analysis.
// The trait impls at the bottom of this file define it as a union join
// (`succ | pred`) with an all-false initial state.
179 #[derive(Clone, Copy)]
180 pub struct MoveDataFlowOperator
;
// Dataflow context specialized to move tracking: one dataflow bit per entry
// in `MoveData.moves` (see `add_gen_kills`, which gens a bit per move).
182 pub type MoveDataFlow
<'a
, 'tcx
> = DataFlowContext
<'a
, 'tcx
, MoveDataFlowOperator
>;
// Marker type for the assignment-tracking dataflow. Its trait impls at the
// bottom of this file use the same union join (`succ | pred`) and all-false
// initial value as the moves analysis.
184 #[derive(Clone, Copy)]
185 pub struct AssignDataFlowOperator
;
// Dataflow context with one bit per entry in `MoveData.var_assignments`
// (path assignments get no bits — see the `MoveData` field docs above).
187 pub type AssignDataFlow
<'a
, 'tcx
> = DataFlowContext
<'a
, 'tcx
, AssignDataFlowOperator
>;
189 fn loan_path_is_precise(loan_path
: &LoanPath
) -> bool
{
190 match loan_path
.kind
{
191 LpVar(_
) | LpUpvar(_
) => {
194 LpExtend(_
, _
, LpInterior(InteriorKind
::InteriorElement(..))) => {
195 // Paths involving element accesses a[i] do not refer to a unique
196 // location, as there is no accurate tracking of the indices.
198 // (Paths involving element accesses via slice pattern bindings
199 // can in principle be tracked precisely, but that is future
200 // work. For now, continue claiming that they are imprecise.)
203 LpDowncast(ref lp_base
, _
) |
204 LpExtend(ref lp_base
, _
, _
) => {
205 loan_path_is_precise(&**lp_base
)
210 impl<'tcx
> MoveData
<'tcx
> {
211 pub fn new() -> MoveData
<'tcx
> {
213 paths
: RefCell
::new(Vec
::new()),
214 path_map
: RefCell
::new(FnvHashMap()),
215 moves
: RefCell
::new(Vec
::new()),
216 path_assignments
: RefCell
::new(Vec
::new()),
217 var_assignments
: RefCell
::new(Vec
::new()),
218 variant_matches
: RefCell
::new(Vec
::new()),
219 assignee_ids
: RefCell
::new(NodeSet()),
220 fragments
: RefCell
::new(fragments
::FragmentSets
::new()),
224 pub fn path_loan_path(&self, index
: MovePathIndex
) -> Rc
<LoanPath
<'tcx
>> {
225 (*self.paths
.borrow())[index
.get()].loan_path
.clone()
228 fn path_parent(&self, index
: MovePathIndex
) -> MovePathIndex
{
229 (*self.paths
.borrow())[index
.get()].parent
232 fn path_first_move(&self, index
: MovePathIndex
) -> MoveIndex
{
233 (*self.paths
.borrow())[index
.get()].first_move
236 /// Returns the index of first child, or `InvalidMovePathIndex` if
238 fn path_first_child(&self, index
: MovePathIndex
) -> MovePathIndex
{
239 (*self.paths
.borrow())[index
.get()].first_child
242 fn path_next_sibling(&self, index
: MovePathIndex
) -> MovePathIndex
{
243 (*self.paths
.borrow())[index
.get()].next_sibling
246 fn set_path_first_move(&self,
247 index
: MovePathIndex
,
248 first_move
: MoveIndex
) {
249 (*self.paths
.borrow_mut())[index
.get()].first_move
= first_move
252 fn set_path_first_child(&self,
253 index
: MovePathIndex
,
254 first_child
: MovePathIndex
) {
255 (*self.paths
.borrow_mut())[index
.get()].first_child
= first_child
258 fn move_next_move(&self, index
: MoveIndex
) -> MoveIndex
{
259 //! Type safe indexing operator
260 (*self.moves
.borrow())[index
.get()].next_move
263 fn is_var_path(&self, index
: MovePathIndex
) -> bool
{
264 //! True if `index` refers to a variable
265 self.path_parent(index
) == InvalidMovePathIndex
268 /// Returns the existing move path index for `lp`, if any, and otherwise adds a new index for
269 /// `lp` and any of its base paths that do not yet have an index.
270 pub fn move_path(&self,
271 tcx
: &ty
::ctxt
<'tcx
>,
272 lp
: Rc
<LoanPath
<'tcx
>>) -> MovePathIndex
{
273 match self.path_map
.borrow().get(&lp
) {
280 let index
= match lp
.kind
{
281 LpVar(..) | LpUpvar(..) => {
282 let index
= MovePathIndex(self.paths
.borrow().len());
284 self.paths
.borrow_mut().push(MovePath
{
285 loan_path
: lp
.clone(),
286 parent
: InvalidMovePathIndex
,
287 first_move
: InvalidMoveIndex
,
288 first_child
: InvalidMovePathIndex
,
289 next_sibling
: InvalidMovePathIndex
,
295 LpDowncast(ref base
, _
) |
296 LpExtend(ref base
, _
, _
) => {
297 let parent_index
= self.move_path(tcx
, base
.clone());
299 let index
= MovePathIndex(self.paths
.borrow().len());
301 let next_sibling
= self.path_first_child(parent_index
);
302 self.set_path_first_child(parent_index
, index
);
304 self.paths
.borrow_mut().push(MovePath
{
305 loan_path
: lp
.clone(),
306 parent
: parent_index
,
307 first_move
: InvalidMoveIndex
,
308 first_child
: InvalidMovePathIndex
,
309 next_sibling
: next_sibling
,
316 debug
!("move_path(lp={:?}, index={:?})",
320 assert_eq
!(index
.get(), self.paths
.borrow().len() - 1);
321 self.path_map
.borrow_mut().insert(lp
, index
);
325 fn existing_move_path(&self, lp
: &Rc
<LoanPath
<'tcx
>>)
326 -> Option
<MovePathIndex
> {
327 self.path_map
.borrow().get(lp
).cloned()
330 fn existing_base_paths(&self, lp
: &Rc
<LoanPath
<'tcx
>>)
331 -> Vec
<MovePathIndex
> {
332 let mut result
= vec
!();
333 self.add_existing_base_paths(lp
, &mut result
);
337 /// Adds any existing move path indices for `lp` and any base paths of `lp` to `result`, but
338 /// does not add new move paths
339 fn add_existing_base_paths(&self, lp
: &Rc
<LoanPath
<'tcx
>>,
340 result
: &mut Vec
<MovePathIndex
>) {
341 match self.path_map
.borrow().get(lp
).cloned() {
343 self.each_base_path(index
, |p
| {
350 LpVar(..) | LpUpvar(..) => { }
351 LpDowncast(ref b
, _
) |
352 LpExtend(ref b
, _
, _
) => {
353 self.add_existing_base_paths(b
, result
);
361 /// Adds a new move entry for a move of `lp` that occurs at location `id` with kind `kind`.
362 pub fn add_move(&self,
363 tcx
: &ty
::ctxt
<'tcx
>,
364 lp
: Rc
<LoanPath
<'tcx
>>,
367 debug
!("add_move(lp={:?}, id={}, kind={:?})",
372 let path_index
= self.move_path(tcx
, lp
.clone());
373 let move_index
= MoveIndex(self.moves
.borrow().len());
375 self.fragments
.borrow_mut().add_move(path_index
);
377 let next_move
= self.path_first_move(path_index
);
378 self.set_path_first_move(path_index
, move_index
);
380 self.moves
.borrow_mut().push(Move
{
388 /// Adds a new record for an assignment to `lp` that occurs at location `id` with the given
390 pub fn add_assignment(&self,
391 tcx
: &ty
::ctxt
<'tcx
>,
392 lp
: Rc
<LoanPath
<'tcx
>>,
393 assign_id
: ast
::NodeId
,
395 assignee_id
: ast
::NodeId
,
396 mode
: euv
::MutateMode
) {
397 debug
!("add_assignment(lp={:?}, assign_id={}, assignee_id={}",
398 lp
, assign_id
, assignee_id
);
400 let path_index
= self.move_path(tcx
, lp
.clone());
402 self.fragments
.borrow_mut().add_assignment(path_index
);
405 euv
::Init
| euv
::JustWrite
=> {
406 self.assignee_ids
.borrow_mut().insert(assignee_id
);
408 euv
::WriteAndRead
=> { }
411 let assignment
= Assignment
{
417 if self.is_var_path(path_index
) {
418 debug
!("add_assignment[var](lp={:?}, assignment={}, path_index={:?})",
419 lp
, self.var_assignments
.borrow().len(), path_index
);
421 self.var_assignments
.borrow_mut().push(assignment
);
423 debug
!("add_assignment[path](lp={:?}, path_index={:?})",
426 self.path_assignments
.borrow_mut().push(assignment
);
430 /// Adds a new record for a match of `base_lp`, downcast to
431 /// variant `lp`, that occurs at location `pattern_id`. (One
432 /// should be able to recover the span info from the
433 /// `pattern_id` and the ast_map, I think.)
434 pub fn add_variant_match(&self,
435 tcx
: &ty
::ctxt
<'tcx
>,
436 lp
: Rc
<LoanPath
<'tcx
>>,
437 pattern_id
: ast
::NodeId
,
438 base_lp
: Rc
<LoanPath
<'tcx
>>,
439 mode
: euv
::MatchMode
) {
440 debug
!("add_variant_match(lp={:?}, pattern_id={})",
443 let path_index
= self.move_path(tcx
, lp
.clone());
444 let base_path_index
= self.move_path(tcx
, base_lp
.clone());
446 self.fragments
.borrow_mut().add_assignment(path_index
);
448 let variant_match
= VariantMatch
{
450 base_path
: base_path_index
,
455 self.variant_matches
.borrow_mut().push(variant_match
);
458 fn fixup_fragment_sets(&self, tcx
: &ty
::ctxt
<'tcx
>) {
459 fragments
::fixup_fragment_sets(self, tcx
)
462 /// Adds the gen/kills for the various moves and
463 /// assignments into the provided data flow contexts.
464 /// Moves are generated by moves and killed by assignments and
465 /// scoping. Assignments are generated by assignment to variables and
466 /// killed by scoping. See `README.md` for more details.
467 fn add_gen_kills(&self,
468 tcx
: &ty
::ctxt
<'tcx
>,
469 dfcx_moves
: &mut MoveDataFlow
,
470 dfcx_assign
: &mut AssignDataFlow
) {
471 for (i
, the_move
) in self.moves
.borrow().iter().enumerate() {
472 dfcx_moves
.add_gen(the_move
.id
, i
);
475 for (i
, assignment
) in self.var_assignments
.borrow().iter().enumerate() {
476 dfcx_assign
.add_gen(assignment
.id
, i
);
477 self.kill_moves(assignment
.path
, assignment
.id
,
478 KillFrom
::Execution
, dfcx_moves
);
481 for assignment
in self.path_assignments
.borrow().iter() {
482 self.kill_moves(assignment
.path
, assignment
.id
,
483 KillFrom
::Execution
, dfcx_moves
);
486 // Kill all moves related to a variable `x` when
487 // it goes out of scope:
488 for path
in self.paths
.borrow().iter() {
489 match path
.loan_path
.kind
{
490 LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
491 let kill_scope
= path
.loan_path
.kill_scope(tcx
);
492 let path
= *self.path_map
.borrow().get(&path
.loan_path
).unwrap();
493 self.kill_moves(path
, kill_scope
.node_id(),
494 KillFrom
::ScopeEnd
, dfcx_moves
);
500 // Kill all assignments when the variable goes out of scope:
501 for (assignment_index
, assignment
) in
502 self.var_assignments
.borrow().iter().enumerate() {
503 let lp
= self.path_loan_path(assignment
.path
);
505 LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
506 let kill_scope
= lp
.kill_scope(tcx
);
507 dfcx_assign
.add_kill(KillFrom
::ScopeEnd
,
508 kill_scope
.node_id(),
512 tcx
.sess
.bug("var assignment for non var path");
518 fn each_base_path
<F
>(&self, index
: MovePathIndex
, mut f
: F
) -> bool
where
519 F
: FnMut(MovePathIndex
) -> bool
,
522 while p
!= InvalidMovePathIndex
{
526 p
= self.path_parent(p
);
531 // FIXME(#19596) This is a workaround, but there should be better way to do this
532 fn each_extending_path_
<F
>(&self, index
: MovePathIndex
, f
: &mut F
) -> bool
where
533 F
: FnMut(MovePathIndex
) -> bool
,
539 let mut p
= self.path_first_child(index
);
540 while p
!= InvalidMovePathIndex
{
541 if !self.each_extending_path_(p
, f
) {
544 p
= self.path_next_sibling(p
);
550 fn each_extending_path
<F
>(&self, index
: MovePathIndex
, mut f
: F
) -> bool
where
551 F
: FnMut(MovePathIndex
) -> bool
,
553 self.each_extending_path_(index
, &mut f
)
556 fn each_applicable_move
<F
>(&self, index0
: MovePathIndex
, mut f
: F
) -> bool
where
557 F
: FnMut(MoveIndex
) -> bool
,
560 self.each_extending_path(index0
, |index
| {
561 let mut p
= self.path_first_move(index
);
562 while p
!= InvalidMoveIndex
{
567 p
= self.move_next_move(p
);
576 kill_id
: ast
::NodeId
,
578 dfcx_moves
: &mut MoveDataFlow
) {
579 // We can only perform kills for paths that refer to a unique location,
580 // since otherwise we may kill a move from one location with an
581 // assignment referring to another location.
583 let loan_path
= self.path_loan_path(path
);
584 if loan_path_is_precise(&*loan_path
) {
585 self.each_applicable_move(path
, |move_index
| {
586 debug
!("kill_moves add_kill {:?} kill_id={} move_index={}",
587 kill_kind
, kill_id
, move_index
.get());
588 dfcx_moves
.add_kill(kill_kind
, kill_id
, move_index
.get());
595 impl<'a
, 'tcx
> FlowedMoveData
<'a
, 'tcx
> {
596 pub fn new(move_data
: MoveData
<'tcx
>,
597 tcx
: &'a ty
::ctxt
<'tcx
>,
599 id_range
: ast_util
::IdRange
,
602 -> FlowedMoveData
<'a
, 'tcx
> {
604 DataFlowContext
::new(tcx
,
605 "flowed_move_data_moves",
608 MoveDataFlowOperator
,
610 move_data
.moves
.borrow().len());
611 let mut dfcx_assign
=
612 DataFlowContext
::new(tcx
,
613 "flowed_move_data_assigns",
616 AssignDataFlowOperator
,
618 move_data
.var_assignments
.borrow().len());
620 move_data
.fixup_fragment_sets(tcx
);
622 move_data
.add_gen_kills(tcx
,
626 dfcx_moves
.add_kills_from_flow_exits(cfg
);
627 dfcx_assign
.add_kills_from_flow_exits(cfg
);
629 dfcx_moves
.propagate(cfg
, body
);
630 dfcx_assign
.propagate(cfg
, body
);
633 move_data
: move_data
,
634 dfcx_moves
: dfcx_moves
,
635 dfcx_assign
: dfcx_assign
,
639 pub fn kind_of_move_of_path(&self,
641 loan_path
: &Rc
<LoanPath
<'tcx
>>)
642 -> Option
<MoveKind
> {
643 //! Returns the kind of a move of `loan_path` by `id`, if one exists.
646 if let Some(loan_path_index
) = self.move_data
.path_map
.borrow().get(&*loan_path
) {
647 self.dfcx_moves
.each_gen_bit(id
, |move_index
| {
648 let the_move
= self.move_data
.moves
.borrow();
649 let the_move
= (*the_move
)[move_index
];
650 if the_move
.path
== *loan_path_index
{
651 ret
= Some(the_move
.kind
);
661 /// Iterates through each move of `loan_path` (or some base path of `loan_path`) that *may*
662 /// have occurred on entry to `id` without an intervening assignment. In other words, any moves
663 /// that would invalidate a reference to `loan_path` at location `id`.
664 pub fn each_move_of
<F
>(&self,
666 loan_path
: &Rc
<LoanPath
<'tcx
>>,
669 F
: FnMut(&Move
, &LoanPath
<'tcx
>) -> bool
,
673 // 1. Move of `a.b.c`, use of `a.b.c`
674 // 2. Move of `a.b.c`, use of `a.b.c.d`
675 // 3. Move of `a.b.c`, use of `a` or `a.b`
679 // 4. move of `a.b.c`, use of `a.b.d`
681 let base_indices
= self.move_data
.existing_base_paths(loan_path
);
682 if base_indices
.is_empty() {
686 let opt_loan_path_index
= self.move_data
.existing_move_path(loan_path
);
690 self.dfcx_moves
.each_bit_on_entry(id
, |index
| {
691 let the_move
= self.move_data
.moves
.borrow();
692 let the_move
= &(*the_move
)[index
];
693 let moved_path
= the_move
.path
;
694 if base_indices
.iter().any(|x
| x
== &moved_path
) {
695 // Scenario 1 or 2: `loan_path` or some base path of
696 // `loan_path` was moved.
697 if !f(the_move
, &*self.move_data
.path_loan_path(moved_path
)) {
701 if let Some(loan_path_index
) = opt_loan_path_index
{
702 let cont
= self.move_data
.each_base_path(moved_path
, |p
| {
703 if p
== loan_path_index
{
704 // Scenario 3: some extension of `loan_path`
707 &*self.move_data
.path_loan_path(moved_path
))
712 if !cont { ret = false; }
719 /// Iterates through every assignment to `loan_path` that may have occurred on entry to `id`.
720 /// `loan_path` must be a single variable.
721 pub fn each_assignment_of
<F
>(&self,
723 loan_path
: &Rc
<LoanPath
<'tcx
>>,
726 F
: FnMut(&Assignment
) -> bool
,
728 let loan_path_index
= {
729 match self.move_data
.existing_move_path(loan_path
) {
732 // if there were any assignments, it'd have an index
738 self.dfcx_assign
.each_bit_on_entry(id
, |index
| {
739 let assignment
= self.move_data
.var_assignments
.borrow();
740 let assignment
= &(*assignment
)[index
];
741 if assignment
.path
== loan_path_index
&& !f(assignment
) {
750 impl BitwiseOperator
for MoveDataFlowOperator
{
752 fn join(&self, succ
: usize, pred
: usize) -> usize {
753 succ
| pred
// moves from both preds are in scope
757 impl DataFlowOperator
for MoveDataFlowOperator
{
759 fn initial_value(&self) -> bool
{
760 false // no loans in scope by default
764 impl BitwiseOperator
for AssignDataFlowOperator
{
766 fn join(&self, succ
: usize, pred
: usize) -> usize {
767 succ
| pred
// moves from both preds are in scope
771 impl DataFlowOperator
for AssignDataFlowOperator
{
773 fn initial_value(&self) -> bool
{
774 false // no assignments in scope by default