1 //! Dataflow analyses are built upon some interpretation of the
2 //! bitvectors attached to each basic block, represented via a
3 //! zero-sized structure.
5 use rustc_index
::bit_set
::BitSet
;
6 use rustc_index
::vec
::Idx
;
7 use rustc_middle
::mir
::{self, Body, Location}
;
8 use rustc_middle
::ty
::{self, TyCtxt}
;
9 use rustc_target
::abi
::VariantIdx
;
11 use super::MoveDataParamEnv
;
13 use crate::util
::elaborate_drops
::DropFlagState
;
15 use super::move_paths
::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex}
;
16 use super::{AnalysisDomain, BottomValue, GenKill, GenKillAnalysis}
;
18 use super::drop_flag_effects_for_function_entry
;
19 use super::drop_flag_effects_for_location
;
20 use super::on_lookup_result_bits
;
21 use crate::dataflow
::drop_flag_effects
;
26 pub use self::borrowed_locals
::*;
27 pub use self::storage_liveness
::*;
29 pub(super) mod borrows
;
31 /// `MaybeInitializedPlaces` tracks all places that might be
32 /// initialized upon reaching a particular point in the control flow
35 /// For example, in code like the following, we have corresponding
36 /// dataflow information shown in the right-hand comments.
40 /// fn foo(pred: bool) { // maybe-init:
42 /// let a = S; let b = S; let c; let d; // {a, b}
54 /// c = S; // {a, b, c, d}
58 /// To determine whether a place *must* be initialized at a
59 /// particular control-flow point, one can take the set-difference
60 /// between this data and the data from `MaybeUninitializedPlaces` at the
61 /// corresponding control-flow point.
63 /// Similarly, at a given `drop` statement, the set-intersection
64 /// between this data and `MaybeUninitializedPlaces` yields the set of
65 /// places that would require a dynamic drop-flag at that statement.
66 pub struct MaybeInitializedPlaces
<'a
, 'tcx
> {
69 mdpe
: &'a MoveDataParamEnv
<'tcx
>,
72 impl<'a
, 'tcx
> MaybeInitializedPlaces
<'a
, 'tcx
> {
73 pub fn new(tcx
: TyCtxt
<'tcx
>, body
: &'a Body
<'tcx
>, mdpe
: &'a MoveDataParamEnv
<'tcx
>) -> Self {
74 MaybeInitializedPlaces { tcx, body, mdpe }
78 impl<'a
, 'tcx
> HasMoveData
<'tcx
> for MaybeInitializedPlaces
<'a
, 'tcx
> {
79 fn move_data(&self) -> &MoveData
<'tcx
> {
84 /// `MaybeUninitializedPlaces` tracks all places that might be
85 /// uninitialized upon reaching a particular point in the control flow
88 /// For example, in code like the following, we have corresponding
89 /// dataflow information shown in the right-hand comments.
93 /// fn foo(pred: bool) { // maybe-uninit:
95 /// let a = S; let b = S; let c; let d; // { c, d}
98 /// drop(a); // {a, c, d}
99 /// b = S; // {a, c, d}
102 /// drop(b); // { b, c, d}
103 /// d = S; // { b, c }
105 /// } // {a, b, c, d}
107 /// c = S; // {a, b, d}
111 /// To determine whether a place *must* be uninitialized at a
112 /// particular control-flow point, one can take the set-difference
113 /// between this data and the data from `MaybeInitializedPlaces` at the
114 /// corresponding control-flow point.
116 /// Similarly, at a given `drop` statement, the set-intersection
117 /// between this data and `MaybeInitializedPlaces` yields the set of
118 /// places that would require a dynamic drop-flag at that statement.
119 pub struct MaybeUninitializedPlaces
<'a
, 'tcx
> {
121 body
: &'a Body
<'tcx
>,
122 mdpe
: &'a MoveDataParamEnv
<'tcx
>,
125 impl<'a
, 'tcx
> MaybeUninitializedPlaces
<'a
, 'tcx
> {
126 pub fn new(tcx
: TyCtxt
<'tcx
>, body
: &'a Body
<'tcx
>, mdpe
: &'a MoveDataParamEnv
<'tcx
>) -> Self {
127 MaybeUninitializedPlaces { tcx, body, mdpe }
131 impl<'a
, 'tcx
> HasMoveData
<'tcx
> for MaybeUninitializedPlaces
<'a
, 'tcx
> {
132 fn move_data(&self) -> &MoveData
<'tcx
> {
137 /// `DefinitelyInitializedPlaces` tracks all places that are definitely
138 /// initialized upon reaching a particular point in the control flow
141 /// For example, in code like the following, we have corresponding
142 /// dataflow information shown in the right-hand comments.
146 /// fn foo(pred: bool) { // definite-init:
148 /// let a = S; let b = S; let c; let d; // {a, b }
151 /// drop(a); // { b, }
155 /// drop(b); // {a, }
164 /// To determine whether a place *may* be uninitialized at a
165 /// particular control-flow point, one can take the set-complement
168 /// Similarly, at a given `drop` statement, the set-difference between
169 /// this data and `MaybeInitializedPlaces` yields the set of places
170 /// that would require a dynamic drop-flag at that statement.
171 pub struct DefinitelyInitializedPlaces
<'a
, 'tcx
> {
173 body
: &'a Body
<'tcx
>,
174 mdpe
: &'a MoveDataParamEnv
<'tcx
>,
177 impl<'a
, 'tcx
> DefinitelyInitializedPlaces
<'a
, 'tcx
> {
178 pub fn new(tcx
: TyCtxt
<'tcx
>, body
: &'a Body
<'tcx
>, mdpe
: &'a MoveDataParamEnv
<'tcx
>) -> Self {
179 DefinitelyInitializedPlaces { tcx, body, mdpe }
183 impl<'a
, 'tcx
> HasMoveData
<'tcx
> for DefinitelyInitializedPlaces
<'a
, 'tcx
> {
184 fn move_data(&self) -> &MoveData
<'tcx
> {
189 /// `EverInitializedPlaces` tracks all places that might have ever been
190 /// initialized upon reaching a particular point in the control flow
191 /// for a function, without an intervening `Storage Dead`.
193 /// This dataflow is used to determine if an immutable local variable may
196 /// For example, in code like the following, we have corresponding
197 /// dataflow information shown in the right-hand comments.
201 /// fn foo(pred: bool) { // ever-init:
203 /// let a = S; let b = S; let c; let d; // {a, b }
206 /// drop(a); // {a, b, }
207 /// b = S; // {a, b, }
210 /// drop(b); // {a, b, }
211 /// d = S; // {a, b, d }
215 /// c = S; // {a, b, c, d }
218 pub struct EverInitializedPlaces
<'a
, 'tcx
> {
221 body
: &'a Body
<'tcx
>,
222 mdpe
: &'a MoveDataParamEnv
<'tcx
>,
225 impl<'a
, 'tcx
> EverInitializedPlaces
<'a
, 'tcx
> {
226 pub fn new(tcx
: TyCtxt
<'tcx
>, body
: &'a Body
<'tcx
>, mdpe
: &'a MoveDataParamEnv
<'tcx
>) -> Self {
227 EverInitializedPlaces { tcx, body, mdpe }
231 impl<'a
, 'tcx
> HasMoveData
<'tcx
> for EverInitializedPlaces
<'a
, 'tcx
> {
232 fn move_data(&self) -> &MoveData
<'tcx
> {
237 impl<'a
, 'tcx
> MaybeInitializedPlaces
<'a
, 'tcx
> {
239 trans
: &mut impl GenKill
<MovePathIndex
>,
241 state
: DropFlagState
,
244 DropFlagState
::Absent
=> trans
.kill(path
),
245 DropFlagState
::Present
=> trans
.gen(path
),
250 impl<'a
, 'tcx
> MaybeUninitializedPlaces
<'a
, 'tcx
> {
252 trans
: &mut impl GenKill
<MovePathIndex
>,
254 state
: DropFlagState
,
257 DropFlagState
::Absent
=> trans
.gen(path
),
258 DropFlagState
::Present
=> trans
.kill(path
),
263 impl<'a
, 'tcx
> DefinitelyInitializedPlaces
<'a
, 'tcx
> {
265 trans
: &mut impl GenKill
<MovePathIndex
>,
267 state
: DropFlagState
,
270 DropFlagState
::Absent
=> trans
.kill(path
),
271 DropFlagState
::Present
=> trans
.gen(path
),
// NOTE(review): this region is a truncated, line-wrapped listing — the
// original source line numbers are embedded as stray leading tokens and
// several original lines (closing braces, the `state.insert(path)` body of
// the entry closure) are missing. Code left byte-identical; comments only.
//
// Dataflow domain for `MaybeInitializedPlaces`: one bit per move path,
// analysis name "maybe_init".
276 impl<'tcx
> AnalysisDomain
<'tcx
> for MaybeInitializedPlaces
<'_
, 'tcx
> {
277 type Idx
= MovePathIndex
;
279 const NAME
: &'
static str = "maybe_init";
// Bitvector width = number of move paths in the body.
281 fn bits_per_block(&self, _
: &mir
::Body
<'tcx
>) -> usize {
282 self.move_data().move_paths
.len()
// Seed the start block from the drop-flag effects at function entry;
// each effect is asserted to be `Present` (the line that records `path`
// into `state` is missing from this listing).
285 fn initialize_start_block(&self, _
: &mir
::Body
<'tcx
>, state
: &mut BitSet
<Self::Idx
>) {
286 drop_flag_effects_for_function_entry(self.tcx
, self.body
, self.mdpe
, |path
, s
| {
287 assert
!(s
== DropFlagState
::Present
);
// Render a move-path index as the move path itself for debug output.
292 fn pretty_print_idx(&self, w
: &mut impl std
::io
::Write
, mpi
: Self::Idx
) -> std
::io
::Result
<()> {
293 write
!(w
, "{}", self.move_data().move_paths
[mpi
])
// NOTE(review): truncated, line-wrapped listing (embedded original line
// numbers; several fn headers such as `fn statement_effect(` and many
// closing braces/argument lists are missing). Code left byte-identical.
//
// Transfer functions for `MaybeInitializedPlaces`: statement and
// terminator effects replay the drop-flag effects at each location.
297 impl<'tcx
> GenKillAnalysis
<'tcx
> for MaybeInitializedPlaces
<'_
, 'tcx
> {
// (statement_effect header missing from listing) — applies drop-flag
// effects for the statement's location via update_bits.
300 trans
: &mut impl GenKill
<Self::Idx
>,
301 _statement
: &mir
::Statement
<'tcx
>,
304 drop_flag_effects_for_location(self.tcx
, self.body
, self.mdpe
, location
, |path
, s
| {
305 Self::update_bits(trans
, path
, s
)
// Same replay of drop-flag effects, but for the terminator location.
309 fn terminator_effect(
311 trans
: &mut impl GenKill
<Self::Idx
>,
312 _terminator
: &mir
::Terminator
<'tcx
>,
315 drop_flag_effects_for_location(self.tcx
, self.body
, self.mdpe
, location
, |path
, s
| {
316 Self::update_bits(trans
, path
, s
)
// A successful call return initializes the destination place.
320 fn call_return_effect(
322 trans
: &mut impl GenKill
<Self::Idx
>,
323 _block
: mir
::BasicBlock
,
324 _func
: &mir
::Operand
<'tcx
>,
325 _args
: &[mir
::Operand
<'tcx
>],
326 dest_place
: mir
::Place
<'tcx
>,
328 // when a call returns successfully, that means we need to set
329 // the bits for that dest_place to 1 (initialized).
330 on_lookup_result_bits(
// (remaining arguments of on_lookup_result_bits missing from listing)
334 self.move_data().rev_lookup
.find(dest_place
.as_ref()),
// On a `SwitchInt` over an enum discriminant, the matched variant is
// known on each edge, so other variants' move paths can be killed.
341 fn discriminant_switch_effect(
343 trans
: &mut impl GenKill
<Self::Idx
>,
344 _block
: mir
::BasicBlock
,
345 enum_place
: mir
::Place
<'tcx
>,
// Only an exact move-path match for the enum place is usable here;
// a parent match means the enum is not tracked precisely enough.
349 let enum_mpi
= match self.move_data().rev_lookup
.find(enum_place
.as_ref()) {
350 LookupResult
::Exact(mpi
) => mpi
,
351 LookupResult
::Parent(_
) => return,
354 // Kill all move paths that correspond to variants other than this one
355 let move_paths
= &self.move_data().move_paths
;
356 let enum_path
= &move_paths
[enum_mpi
];
357 for (mpi
, variant_path
) in enum_path
.children(move_paths
) {
// Skip the child whose downcast projection names the matched variant;
// every other variant's subtree is killed.
359 match variant_path
.place
.projection
.last().unwrap() {
360 mir
::ProjectionElem
::Downcast(_
, idx
) if *idx
== variant
=> continue,
361 _
=> drop_flag_effects
::on_all_children_bits(
// (intermediate arguments missing from listing)
366 |mpi
| trans
.kill(mpi
),
// NOTE(review): truncated, line-wrapped listing; code left byte-identical.
//
// Dataflow domain for `MaybeUninitializedPlaces`: one bit per move path,
// analysis name "maybe_uninit".
373 impl<'tcx
> AnalysisDomain
<'tcx
> for MaybeUninitializedPlaces
<'_
, 'tcx
> {
374 type Idx
= MovePathIndex
;
376 const NAME
: &'
static str = "maybe_uninit";
// Bitvector width = number of move paths in the body.
378 fn bits_per_block(&self, _
: &mir
::Body
<'tcx
>) -> usize {
379 self.move_data().move_paths
.len()
382 // sets on_entry bits for Arg places
383 fn initialize_start_block(&self, body
: &mir
::Body
<'tcx
>, state
: &mut BitSet
<Self::Idx
>) {
384 // set all bits to 1 (uninit) before gathering counterevidence
// (the line that actually sets all bits is missing from this listing;
// only the domain-size sanity check survives)
385 assert
!(self.bits_per_block(body
) == state
.domain_size());
// Entry drop-flag effects are asserted `Present`; the line that clears
// the corresponding "uninit" bit is missing from this listing.
388 drop_flag_effects_for_function_entry(self.tcx
, self.body
, self.mdpe
, |path
, s
| {
389 assert
!(s
== DropFlagState
::Present
);
// Render a move-path index as the move path itself for debug output.
394 fn pretty_print_idx(&self, w
: &mut impl std
::io
::Write
, mpi
: Self::Idx
) -> std
::io
::Result
<()> {
395 write
!(w
, "{}", self.move_data().move_paths
[mpi
])
// NOTE(review): truncated, line-wrapped listing; code left byte-identical.
//
// Transfer functions for `MaybeUninitializedPlaces` — mirror image of the
// `MaybeInitializedPlaces` impl above (gen/kill swapped via update_bits).
399 impl<'tcx
> GenKillAnalysis
<'tcx
> for MaybeUninitializedPlaces
<'_
, 'tcx
> {
// (statement_effect header missing from listing) — replays drop-flag
// effects for the statement's location.
402 trans
: &mut impl GenKill
<Self::Idx
>,
403 _statement
: &mir
::Statement
<'tcx
>,
406 drop_flag_effects_for_location(self.tcx
, self.body
, self.mdpe
, location
, |path
, s
| {
407 Self::update_bits(trans
, path
, s
)
// Same replay for the terminator location.
411 fn terminator_effect(
413 trans
: &mut impl GenKill
<Self::Idx
>,
414 _terminator
: &mir
::Terminator
<'tcx
>,
417 drop_flag_effects_for_location(self.tcx
, self.body
, self.mdpe
, location
, |path
, s
| {
418 Self::update_bits(trans
, path
, s
)
// A successful call return initializes the destination, i.e. clears its
// "maybe uninitialized" bits.
422 fn call_return_effect(
424 trans
: &mut impl GenKill
<Self::Idx
>,
425 _block
: mir
::BasicBlock
,
426 _func
: &mir
::Operand
<'tcx
>,
427 _args
: &[mir
::Operand
<'tcx
>],
428 dest_place
: mir
::Place
<'tcx
>,
430 // when a call returns successfully, that means we need to set
431 // the bits for that dest_place to 0 (initialized).
432 on_lookup_result_bits(
// (remaining arguments of on_lookup_result_bits missing from listing)
436 self.move_data().rev_lookup
.find(dest_place
.as_ref()),
// NOTE(review): truncated, line-wrapped listing; code left byte-identical.
//
// Dataflow domain for `DefinitelyInitializedPlaces`: one bit per move path,
// analysis name "definite_init".
444 impl<'a
, 'tcx
> AnalysisDomain
<'tcx
> for DefinitelyInitializedPlaces
<'a
, 'tcx
> {
445 type Idx
= MovePathIndex
;
447 const NAME
: &'
static str = "definite_init";
// Bitvector width = number of move paths in the body.
449 fn bits_per_block(&self, _
: &mir
::Body
<'tcx
>) -> usize {
450 self.move_data().move_paths
.len()
453 // sets on_entry bits for Arg places
// (the line that clears `state` before seeding is missing from this
// listing)
454 fn initialize_start_block(&self, _
: &mir
::Body
<'tcx
>, state
: &mut BitSet
<Self::Idx
>) {
// Entry drop-flag effects are asserted `Present`; the `state.insert(path)`
// line is missing from this listing.
457 drop_flag_effects_for_function_entry(self.tcx
, self.body
, self.mdpe
, |path
, s
| {
458 assert
!(s
== DropFlagState
::Present
);
// Render a move-path index as the move path itself for debug output.
463 fn pretty_print_idx(&self, w
: &mut impl std
::io
::Write
, mpi
: Self::Idx
) -> std
::io
::Result
<()> {
464 write
!(w
, "{}", self.move_data().move_paths
[mpi
])
// NOTE(review): truncated, line-wrapped listing; code left byte-identical.
//
// Transfer functions for `DefinitelyInitializedPlaces`: same replay of
// drop-flag effects as `MaybeInitializedPlaces`, but combined with an
// intersection (must) lattice via BOTTOM_VALUE = true below.
468 impl<'tcx
> GenKillAnalysis
<'tcx
> for DefinitelyInitializedPlaces
<'_
, 'tcx
> {
// (statement_effect header missing from listing) — replays drop-flag
// effects for the statement's location.
471 trans
: &mut impl GenKill
<Self::Idx
>,
472 _statement
: &mir
::Statement
<'tcx
>,
475 drop_flag_effects_for_location(self.tcx
, self.body
, self.mdpe
, location
, |path
, s
| {
476 Self::update_bits(trans
, path
, s
)
// Same replay for the terminator location.
480 fn terminator_effect(
482 trans
: &mut impl GenKill
<Self::Idx
>,
483 _terminator
: &mir
::Terminator
<'tcx
>,
486 drop_flag_effects_for_location(self.tcx
, self.body
, self.mdpe
, location
, |path
, s
| {
487 Self::update_bits(trans
, path
, s
)
// A successful call return definitely initializes the destination place.
491 fn call_return_effect(
493 trans
: &mut impl GenKill
<Self::Idx
>,
494 _block
: mir
::BasicBlock
,
495 _func
: &mir
::Operand
<'tcx
>,
496 _args
: &[mir
::Operand
<'tcx
>],
497 dest_place
: mir
::Place
<'tcx
>,
499 // when a call returns successfully, that means we need to set
500 // the bits for that dest_place to 1 (initialized).
501 on_lookup_result_bits(
// (remaining arguments of on_lookup_result_bits missing from listing)
505 self.move_data().rev_lookup
.find(dest_place
.as_ref()),
// NOTE(review): truncated, line-wrapped listing; code left byte-identical.
//
// Dataflow domain for `EverInitializedPlaces`: unlike the other analyses
// here, the index type is `InitIndex` (one bit per *initialization event*,
// not per move path); analysis name "ever_init".
513 impl<'tcx
> AnalysisDomain
<'tcx
> for EverInitializedPlaces
<'_
, 'tcx
> {
514 type Idx
= InitIndex
;
516 const NAME
: &'
static str = "ever_init";
// Bitvector width = number of recorded initializations.
518 fn bits_per_block(&self, _
: &mir
::Body
<'tcx
>) -> usize {
519 self.move_data().inits
.len()
// Function arguments count as initialized on entry; their inits occupy
// the first `arg_count` indices of `move_data().inits`.
522 fn initialize_start_block(&self, body
: &mir
::Body
<'tcx
>, state
: &mut BitSet
<Self::Idx
>) {
523 for arg_init
in 0..body
.arg_count
{
524 state
.insert(InitIndex
::new(arg_init
));
// NOTE(review): truncated, line-wrapped listing (embedded original line
// numbers; fn headers such as `fn statement_effect(` and the debug! macro
// invocations around the quoted format strings are partially missing).
// Code left byte-identical; comments only.
529 impl<'tcx
> GenKillAnalysis
<'tcx
> for EverInitializedPlaces
<'_
, 'tcx
> {
// (statement_effect header missing from listing) — gens the inits recorded
// at this statement's location and, on StorageDead, kills all inits of the
// dead local so it can be re-initialized on the next loop iteration.
532 trans
: &mut impl GenKill
<Self::Idx
>,
533 stmt
: &mir
::Statement
<'tcx
>,
536 let move_data
= self.move_data();
537 let init_path_map
= &move_data
.init_path_map
;
538 let init_loc_map
= &move_data
.init_loc_map
;
539 let rev_lookup
= &move_data
.rev_lookup
;
// (debug! invocation around this format string is truncated in the listing)
542 "statement {:?} at loc {:?} initializes move_indexes {:?}",
543 stmt
, location
, &init_loc_map
[location
]
// Gen every initialization recorded at this location.
545 trans
.gen_all(init_loc_map
[location
].iter().copied());
547 if let mir
::StatementKind
::StorageDead(local
) = stmt
.kind
{
548 // End inits for StorageDead, so that an immutable variable can
549 // be reinitialized on the next iteration of the loop.
550 let move_path_index
= rev_lookup
.find_local(local
);
// (debug! invocation around this format string is truncated in the listing)
552 "stmt {:?} at loc {:?} clears the ever initialized status of {:?}",
553 stmt
, location
, &init_path_map
[move_path_index
]
555 trans
.kill_all(init_path_map
[move_path_index
].iter().copied());
// Terminator effect: gen the inits at the terminator location, except
// those that only apply on the non-panic path (filtered below).
559 fn terminator_effect(
561 trans
: &mut impl GenKill
<Self::Idx
>,
562 _terminator
: &mir
::Terminator
<'tcx
>,
565 let (body
, move_data
) = (self.body
, self.move_data());
566 let term
= body
[location
.block
].terminator();
567 let init_loc_map
= &move_data
.init_loc_map
;
// (debug! invocation around this format string is truncated in the listing)
569 "terminator {:?} at loc {:?} initializes move_indexes {:?}",
570 term
, location
, &init_loc_map
[location
]
// Filter out NonPanicPathOnly inits here; those are handled by
// call_return_effect on the success edge instead.
573 init_loc_map
[location
]
575 .filter(|init_index
| {
576 move_data
.inits
[**init_index
].kind
!= InitKind
::NonPanicPathOnly
// On the successful return edge of a call, gen every init recorded at the
// call terminator's location (including the NonPanicPathOnly ones).
582 fn call_return_effect(
584 trans
: &mut impl GenKill
<Self::Idx
>,
585 block
: mir
::BasicBlock
,
586 _func
: &mir
::Operand
<'tcx
>,
587 _args
: &[mir
::Operand
<'tcx
>],
588 _dest_place
: mir
::Place
<'tcx
>,
590 let move_data
= self.move_data();
591 let init_loc_map
= &move_data
.init_loc_map
;
593 let call_loc
= self.body
.terminator_loc(block
);
594 for init_index
in &init_loc_map
[call_loc
] {
595 trans
.gen(*init_index
);
600 impl<'a
, 'tcx
> BottomValue
for MaybeInitializedPlaces
<'a
, 'tcx
> {
601 /// bottom = uninitialized
602 const BOTTOM_VALUE
: bool
= false;
605 impl<'a
, 'tcx
> BottomValue
for MaybeUninitializedPlaces
<'a
, 'tcx
> {
606 /// bottom = initialized (start_block_effect counters this at outset)
607 const BOTTOM_VALUE
: bool
= false;
610 impl<'a
, 'tcx
> BottomValue
for DefinitelyInitializedPlaces
<'a
, 'tcx
> {
611 /// bottom = initialized (start_block_effect counters this at outset)
612 const BOTTOM_VALUE
: bool
= true;
615 impl<'a
, 'tcx
> BottomValue
for EverInitializedPlaces
<'a
, 'tcx
> {
616 /// bottom = no initialized variables by default
617 const BOTTOM_VALUE
: bool
= false;