//! Dataflow analyses are built upon some interpretation of the
//! bitvectors attached to each basic block, represented via a
//! zero-sized structure.

use rustc_index::bit_set::{BitSet, ChunkedBitSet};
use rustc_index::vec::Idx;
use rustc_middle::mir::visit::{MirVisitable, Visitor};
use rustc_middle::mir::{self, Body, Location};
use rustc_middle::ty::{self, TyCtxt};

use crate::drop_flag_effects_for_function_entry;
use crate::drop_flag_effects_for_location;
use crate::elaborate_drops::DropFlagState;
use crate::framework::{CallReturnPlaces, SwitchIntEdgeEffects};
use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
use crate::on_lookup_result_bits;
use crate::MoveDataParamEnv;
use crate::{drop_flag_effects, on_all_children_bits};
use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis};

mod borrowed_locals;
mod init_locals;
mod liveness;
mod storage_liveness;

pub use self::borrowed_locals::MaybeBorrowedLocals;
pub use self::init_locals::MaybeInitializedLocals;
pub use self::liveness::MaybeLiveLocals;
pub use self::storage_liveness::{MaybeRequiresStorage, MaybeStorageLive};

/// `MaybeInitializedPlaces` tracks all places that might be
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // maybe-init:
///                                             // {}
///     let a = S; let b = S; let c; let d;     // {a, b}
///
///     if pred {
///         drop(a);                            // {   b}
///         b = S;                              // {   b}
///
///     } else {
///         drop(b);                            // {a}
///         d = S;                              // {a,       d}
///
///     }                                       // {a, b,    d}
///
///     c = S;                                  // {a, b, c, d}
/// }
/// ```
///
/// To determine whether a place *must* be initialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeUninitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeUninitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeInitializedPlaces<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    mdpe: &'a MoveDataParamEnv<'tcx>,
}

impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
        MaybeInitializedPlaces { tcx, body, mdpe }
    }
}

impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}
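
// Example (sketch only): the usual way to drive this analysis with the dataflow
// engine and query it through a results cursor. `tcx`, `body`, `mdpe`, and
// `location` are assumed to be in scope; this is illustrative, not an API of
// this module.
//
//     let mut cursor = MaybeInitializedPlaces::new(tcx, body, &mdpe)
//         .into_engine(tcx, body)
//         .iterate_to_fixpoint()
//         .into_results_cursor(body);
//     cursor.seek_before_primary_effect(location);
//     let maybe_init = cursor.get(); // &ChunkedBitSet<MovePathIndex>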

/// `MaybeUninitializedPlaces` tracks all places that might be
/// uninitialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // maybe-uninit:
///                                             // {a, b, c, d}
///     let a = S; let b = S; let c; let d;     // {      c, d}
///
///     if pred {
///         drop(a);                            // {a,    c, d}
///         b = S;                              // {a,    c, d}
///
///     } else {
///         drop(b);                            // {   b, c, d}
///         d = S;                              // {   b, c   }
///
///     }                                       // {a, b, c, d}
///
///     c = S;                                  // {a, b,    d}
/// }
/// ```
///
/// To determine whether a place *must* be uninitialized at a
/// particular control-flow point, one can take the set-difference
/// between this data and the data from `MaybeInitializedPlaces` at the
/// corresponding control-flow point.
///
/// Similarly, at a given `drop` statement, the set-intersection
/// between this data and `MaybeInitializedPlaces` yields the set of
/// places that would require a dynamic drop-flag at that statement.
pub struct MaybeUninitializedPlaces<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    mdpe: &'a MoveDataParamEnv<'tcx>,

    mark_inactive_variants_as_uninit: bool,
}

impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
        MaybeUninitializedPlaces { tcx, body, mdpe, mark_inactive_variants_as_uninit: false }
    }

    /// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
    /// enum discriminant.
    ///
    /// This is correct in a vacuum but is not the default because it causes problems in the borrow
    /// checker, where this information gets propagated along `FakeEdge`s.
    pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
        self.mark_inactive_variants_as_uninit = true;
        self
    }
}
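
// Example (sketch only): opting into the stricter treatment of enum variants via
// the builder method above. `tcx`, `body`, and `mdpe` are assumed to be in scope.
//
//     let uninits = MaybeUninitializedPlaces::new(tcx, body, &mdpe)
//         .mark_inactive_variants_as_uninit()
//         .into_engine(tcx, body)
//         .iterate_to_fixpoint();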

impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}

/// `DefinitelyInitializedPlaces` tracks all places that are definitely
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // definite-init:
///                                             // {          }
///     let a = S; let b = S; let c; let d;     // {a, b      }
///
///     if pred {
///         drop(a);                            // {   b,     }
///         b = S;                              // {   b,     }
///
///     } else {
///         drop(b);                            // {a,        }
///         d = S;                              // {a,       d}
///
///     }                                       // {          }
///
///     c = S;                                  // {       c  }
/// }
/// ```
///
/// To determine whether a place *may* be uninitialized at a
/// particular control-flow point, one can take the set-complement
/// of this data.
///
/// Similarly, at a given `drop` statement, the set-difference between
/// this data and `MaybeInitializedPlaces` yields the set of places
/// that would require a dynamic drop-flag at that statement.
pub struct DefinitelyInitializedPlaces<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    mdpe: &'a MoveDataParamEnv<'tcx>,
}

impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
        DefinitelyInitializedPlaces { tcx, body, mdpe }
    }
}

impl<'a, 'tcx> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}
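
// Example (sketch only): because this analysis uses the `Dual` lattice (its join
// is set intersection), a results cursor's state answers "is this move path
// definitely initialized here?" directly. `cursor` is assumed to be a
// `ResultsCursor` over this analysis and `mpi` a `MovePathIndex`.
//
//     cursor.seek_before_primary_effect(location);
//     let definitely_init = cursor.get().0.contains(mpi);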

/// `EverInitializedPlaces` tracks all places that might have ever been
/// initialized upon reaching a particular point in the control flow
/// for a function, without an intervening `StorageDead`.
///
/// This dataflow is used to determine if an immutable local variable may
/// be assigned to.
///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// ```rust
/// struct S;
/// fn foo(pred: bool) {                        // ever-init:
///                                             // {          }
///     let a = S; let b = S; let c; let d;     // {a, b      }
///
///     if pred {
///         drop(a);                            // {a, b      }
///         b = S;                              // {a, b      }
///
///     } else {
///         drop(b);                            // {a, b      }
///         d = S;                              // {a, b,    d}
///
///     }                                       // {a, b,    d}
///
///     c = S;                                  // {a, b, c, d}
/// }
/// ```
pub struct EverInitializedPlaces<'a, 'tcx> {
    #[allow(dead_code)]
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    mdpe: &'a MoveDataParamEnv<'tcx>,
}

impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
        EverInitializedPlaces { tcx, body, mdpe }
    }
}

impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
    fn move_data(&self) -> &MoveData<'tcx> {
        &self.mdpe.move_data
    }
}
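
// Example (sketch only): the "may an immutable local be assigned to?" question
// that this analysis answers, in terms of surface Rust. The first assignment
// below is the local's first initialization and is accepted; the second is
// rejected because the local was ever-initialized at that point.
//
//     let x;
//     x = 1;  // ok: `x` has never been initialized before this point
//     x = 2;  // error[E0384]: cannot assign twice to immutable variable `x`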

impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
    fn update_bits(
        trans: &mut impl GenKill<MovePathIndex>,
        path: MovePathIndex,
        state: DropFlagState,
    ) {
        match state {
            DropFlagState::Absent => trans.kill(path),
            DropFlagState::Present => trans.gen(path),
        }
    }
}

impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
    fn update_bits(
        trans: &mut impl GenKill<MovePathIndex>,
        path: MovePathIndex,
        state: DropFlagState,
    ) {
        match state {
            DropFlagState::Absent => trans.gen(path),
            DropFlagState::Present => trans.kill(path),
        }
    }
}

impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
    fn update_bits(
        trans: &mut impl GenKill<MovePathIndex>,
        path: MovePathIndex,
        state: DropFlagState,
    ) {
        match state {
            DropFlagState::Absent => trans.kill(path),
            DropFlagState::Present => trans.gen(path),
        }
    }
}

impl<'tcx> AnalysisDomain<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
    type Domain = ChunkedBitSet<MovePathIndex>;
    const NAME: &'static str = "maybe_init";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = uninitialized
        ChunkedBitSet::new_empty(self.move_data().move_paths.len())
    }

    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
            assert!(s == DropFlagState::Present);
            state.insert(path);
        });
    }
}

impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
    type Idx = MovePathIndex;

    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });

        if !self.tcx.sess.opts.debugging_opts.precise_enum_drop_elaboration {
            return;
        }

        // Mark all places as "maybe init" if they are mutably borrowed. See #90752.
        for_each_mut_borrow(statement, location, |place| {
            let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
            on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
                trans.gen(child);
            })
        })
    }

    fn terminator_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        terminator: &mir::Terminator<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });

        if !self.tcx.sess.opts.debugging_opts.precise_enum_drop_elaboration {
            return;
        }

        for_each_mut_borrow(terminator, location, |place| {
            let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else { return };
            on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
                trans.gen(child);
            })
        })
    }

    fn call_return_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // when a call returns successfully, that means we need to set
            // the bits for that dest_place to 1 (initialized).
            on_lookup_result_bits(
                self.tcx,
                self.body,
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    trans.gen(mpi);
                },
            );
        });
    }

    fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
        &self,
        block: mir::BasicBlock,
        discr: &mir::Operand<'tcx>,
        edge_effects: &mut impl SwitchIntEdgeEffects<G>,
    ) {
        if !self.tcx.sess.opts.debugging_opts.precise_enum_drop_elaboration {
            return;
        }

        let enum_ = discr.place().and_then(|discr| {
            switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
        });

        let Some((enum_place, enum_def)) = enum_ else {
            return;
        };

        let mut discriminants = enum_def.discriminants(self.tcx);
        edge_effects.apply(|trans, edge| {
            let Some(value) = edge.value else {
                return;
            };

            // MIR building adds discriminants to the `values` array in the same order as they
            // are yielded by `AdtDef::discriminants`. We rely on this to match each
            // discriminant in `values` to its corresponding variant in linear time.
            let (variant, _) = discriminants
                .find(|&(_, discr)| discr.val == value)
                .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");

            // Kill all move paths that correspond to variants we know to be inactive along this
            // particular outgoing edge of a `SwitchInt`.
            drop_flag_effects::on_all_inactive_variants(
                self.tcx,
                self.body,
                self.move_data(),
                enum_place,
                variant,
                |mpi| trans.kill(mpi),
            );
        });
    }
}
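
// Example (sketch only): for a `match` on an `Option<T>` place, the edge-specific
// effect above means that on the outgoing edge whose test value is `None`'s
// discriminant, the move paths under the inactive `Some` variant (e.g.
// `(opt as Some).0`) are killed, i.e. no longer considered maybe-initialized.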

impl<'tcx> AnalysisDomain<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
    type Domain = ChunkedBitSet<MovePathIndex>;

    const NAME: &'static str = "maybe_uninit";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = initialized (start_block_effect counters this at outset)
        ChunkedBitSet::new_empty(self.move_data().move_paths.len())
    }

    // sets on_entry bits for Arg places
    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        // set all bits to 1 (uninit) before gathering counter-evidence
        state.insert_all();

        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
            assert!(s == DropFlagState::Present);
            state.remove(path);
        });
    }
}

impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
    type Idx = MovePathIndex;

    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });

        // Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
        // mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
    }

    fn terminator_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _terminator: &mir::Terminator<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        });
    }

    fn call_return_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // when a call returns successfully, that means we need to set
            // the bits for that dest_place to 0 (initialized).
            on_lookup_result_bits(
                self.tcx,
                self.body,
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    trans.kill(mpi);
                },
            );
        });
    }

    fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
        &self,
        block: mir::BasicBlock,
        discr: &mir::Operand<'tcx>,
        edge_effects: &mut impl SwitchIntEdgeEffects<G>,
    ) {
        if !self.tcx.sess.opts.debugging_opts.precise_enum_drop_elaboration {
            return;
        }

        if !self.mark_inactive_variants_as_uninit {
            return;
        }

        let enum_ = discr.place().and_then(|discr| {
            switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
        });

        let Some((enum_place, enum_def)) = enum_ else {
            return;
        };

        let mut discriminants = enum_def.discriminants(self.tcx);
        edge_effects.apply(|trans, edge| {
            let Some(value) = edge.value else {
                return;
            };

            // MIR building adds discriminants to the `values` array in the same order as they
            // are yielded by `AdtDef::discriminants`. We rely on this to match each
            // discriminant in `values` to its corresponding variant in linear time.
            let (variant, _) = discriminants
                .find(|&(_, discr)| discr.val == value)
                .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");

            // Mark all move paths that correspond to variants other than this one as maybe
            // uninitialized (in reality, they are *definitely* uninitialized).
            drop_flag_effects::on_all_inactive_variants(
                self.tcx,
                self.body,
                self.move_data(),
                enum_place,
                variant,
                |mpi| trans.gen(mpi),
            );
        });
    }
}

impl<'a, 'tcx> AnalysisDomain<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
    /// Use set intersection as the join operator.
    type Domain = lattice::Dual<BitSet<MovePathIndex>>;

    const NAME: &'static str = "definite_init";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = initialized (start_block_effect counters this at outset)
        lattice::Dual(BitSet::new_filled(self.move_data().move_paths.len()))
    }

    // sets on_entry bits for Arg places
    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
        state.0.clear();

        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
            assert!(s == DropFlagState::Present);
            state.0.insert(path);
        });
    }
}

impl<'tcx> GenKillAnalysis<'tcx> for DefinitelyInitializedPlaces<'_, 'tcx> {
    type Idx = MovePathIndex;

    fn statement_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _statement: &mir::Statement<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        })
    }

    fn terminator_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _terminator: &mir::Terminator<'tcx>,
        location: Location,
    ) {
        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
            Self::update_bits(trans, path, s)
        })
    }

    fn call_return_effect(
        &self,
        trans: &mut impl GenKill<Self::Idx>,
        _block: mir::BasicBlock,
        return_places: CallReturnPlaces<'_, 'tcx>,
    ) {
        return_places.for_each(|place| {
            // when a call returns successfully, that means we need to set
            // the bits for that dest_place to 1 (initialized).
            on_lookup_result_bits(
                self.tcx,
                self.body,
                self.move_data(),
                self.move_data().rev_lookup.find(place.as_ref()),
                |mpi| {
                    trans.gen(mpi);
                },
            );
        });
    }
}

impl<'tcx> AnalysisDomain<'tcx> for EverInitializedPlaces<'_, 'tcx> {
    type Domain = ChunkedBitSet<InitIndex>;

    const NAME: &'static str = "ever_init";

    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
        // bottom = no initialized variables by default
        ChunkedBitSet::new_empty(self.move_data().inits.len())
    }

    fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
        for arg_init in 0..body.arg_count {
            state.insert(InitIndex::new(arg_init));
        }
    }
}
ff7c6d11 624
74b04a01 625impl<'tcx> GenKillAnalysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
1b1a35ee
XL
626 type Idx = InitIndex;
627
c295e0f8 628 #[instrument(skip(self, trans), level = "debug")]
74b04a01
XL
629 fn statement_effect(
630 &self,
631 trans: &mut impl GenKill<Self::Idx>,
632 stmt: &mir::Statement<'tcx>,
633 location: Location,
634 ) {
635 let move_data = self.move_data();
ff7c6d11
XL
636 let init_path_map = &move_data.init_path_map;
637 let init_loc_map = &move_data.init_loc_map;
638 let rev_lookup = &move_data.rev_lookup;
639
c295e0f8 640 debug!("initializes move_indexes {:?}", &init_loc_map[location]);
74b04a01 641 trans.gen_all(init_loc_map[location].iter().copied());
ff7c6d11 642
ba9703b0
XL
643 if let mir::StatementKind::StorageDead(local) = stmt.kind {
644 // End inits for StorageDead, so that an immutable variable can
645 // be reinitialized on the next iteration of the loop.
646 let move_path_index = rev_lookup.find_local(local);
c295e0f8 647 debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
ba9703b0 648 trans.kill_all(init_path_map[move_path_index].iter().copied());
ff7c6d11
XL
649 }
650 }
651
c295e0f8 652 #[instrument(skip(self, trans, _terminator), level = "debug")]
74b04a01
XL
653 fn terminator_effect(
654 &self,
655 trans: &mut impl GenKill<Self::Idx>,
656 _terminator: &mir::Terminator<'tcx>,
657 location: Location,
658 ) {
dc9dc135
XL
659 let (body, move_data) = (self.body, self.move_data());
660 let term = body[location.block].terminator();
ff7c6d11 661 let init_loc_map = &move_data.init_loc_map;
c295e0f8
XL
662 debug!(?term);
663 debug!("initializes move_indexes {:?}", init_loc_map[location]);
dc9dc135 664 trans.gen_all(
74b04a01
XL
665 init_loc_map[location]
666 .iter()
667 .filter(|init_index| {
668 move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
669 })
670 .copied(),
ff7c6d11
XL
671 );
672 }
673
74b04a01 674 fn call_return_effect(
0731742a 675 &self,
74b04a01
XL
676 trans: &mut impl GenKill<Self::Idx>,
677 block: mir::BasicBlock,
a2a8927a 678 _return_places: CallReturnPlaces<'_, 'tcx>,
0731742a 679 ) {
ff7c6d11 680 let move_data = self.move_data();
ff7c6d11
XL
681 let init_loc_map = &move_data.init_loc_map;
682
74b04a01 683 let call_loc = self.body.terminator_loc(block);
ff7c6d11 684 for init_index in &init_loc_map[call_loc] {
74b04a01 685 trans.gen(*init_index);
ff7c6d11
XL
686 }
687 }
688}

/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
/// an enum discriminant.
///
/// We expect such blocks to have a call to `discriminant` as their last statement like so:
///
/// ```text
/// ...
/// _42 = discriminant(_1)
/// SwitchInt(_42, ..)
/// ```
///
/// If the basic block matches this pattern, this function returns the place corresponding to the
/// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
fn switch_on_enum_discriminant<'mir, 'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &'mir mir::Body<'tcx>,
    block: &'mir mir::BasicBlockData<'tcx>,
    switch_on: mir::Place<'tcx>,
) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> {
    for statement in block.statements.iter().rev() {
        match &statement.kind {
            mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated)))
                if *lhs == switch_on =>
            {
                match discriminated.ty(body, tcx).ty.kind() {
                    ty::Adt(def, _) => return Some((*discriminated, *def)),

                    // `Rvalue::Discriminant` is also used to get the active yield point for a
                    // generator, but we do not need edge-specific effects in that case. This may
                    // change in the future.
                    ty::Generator(..) => return None,

                    t => bug!("`discriminant` called on unexpected type {:?}", t),
                }
            }
            mir::StatementKind::Coverage(_) => continue,
            _ => return None,
        }
    }
    None
}

struct OnMutBorrow<F>(F);

impl<F> Visitor<'_> for OnMutBorrow<F>
where
    F: FnMut(&mir::Place<'_>),
{
    fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'_>, location: Location) {
        // FIXME: Does `&raw const foo` allow mutation? See #90413.
        match rvalue {
            mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
            | mir::Rvalue::AddressOf(_, place) => (self.0)(place),

            _ => {}
        }

        self.super_rvalue(rvalue, location)
    }
}

/// Calls `f` for each mutable borrow or raw reference in the program.
///
/// This DOES NOT call `f` for a shared borrow of a type with interior mutability. That's okay for
/// initializedness, because we cannot move from an `UnsafeCell` (outside of `core::cell`), but
/// other analyses will likely need to check for `!Freeze`.
fn for_each_mut_borrow<'tcx>(
    mir: &impl MirVisitable<'tcx>,
    location: Location,
    f: impl FnMut(&mir::Place<'_>),
) {
    let mut vis = OnMutBorrow(f);

    mir.apply(location, &mut vis);
}