1 use crate::deref_separator
::deref_finder
;
3 use rustc_data_structures
::fx
::FxHashMap
;
4 use rustc_index
::bit_set
::BitSet
;
5 use rustc_middle
::mir
::patch
::MirPatch
;
6 use rustc_middle
::mir
::*;
7 use rustc_middle
::ty
::{self, TyCtxt}
;
8 use rustc_mir_dataflow
::elaborate_drops
::{elaborate_drop, DropFlagState, Unwind}
;
9 use rustc_mir_dataflow
::elaborate_drops
::{DropElaborator, DropFlagMode, DropStyle}
;
10 use rustc_mir_dataflow
::impls
::{MaybeInitializedPlaces, MaybeUninitializedPlaces}
;
11 use rustc_mir_dataflow
::move_paths
::{LookupResult, MoveData, MovePathIndex}
;
12 use rustc_mir_dataflow
::on_lookup_result_bits
;
13 use rustc_mir_dataflow
::un_derefer
::UnDerefer
;
14 use rustc_mir_dataflow
::MoveDataParamEnv
;
15 use rustc_mir_dataflow
::{on_all_children_bits, on_all_drop_children_bits}
;
16 use rustc_mir_dataflow
::{Analysis, ResultsCursor}
;
18 use rustc_target
::abi
::VariantIdx
;
/// MIR pass that lowers `Drop`/`DropAndReplace` terminators into explicit
/// drop ladders guarded by boolean drop flags (see the `MirPass` impl below).
/// Unit struct: the pass itself carries no state.
21 pub struct ElaborateDrops
;
// NOTE(review): this chunk is extraction-garbled — statements are split across
// physical lines, the original file's line numbers are fused into the content,
// and lines are missing wherever that embedded numbering jumps. Code is left
// byte-identical; only comments are added.
23 impl<'tcx
> MirPass
<'tcx
> for ElaborateDrops
{
// Entry point of the pass: gathers move data, runs the init/uninit dataflow
// analyses, builds a MirPatch of elaborated drops, and applies it to `body`.
24 fn run_pass(&self, tcx
: TyCtxt
<'tcx
>, body
: &mut Body
<'tcx
>) {
25 debug
!("elaborate_drops({:?} @ {:?})", body
.source
, body
.span
);
27 let def_id
= body
.source
.def_id();
28 let param_env
= tcx
.param_env_reveal_all_normalized(def_id
);
// Gather move paths; move errors should already have been reported by
// borrowck, so an Err here is a compiler bug (delayed, not an ICE).
29 let (side_table
, move_data
) = match MoveData
::gather_moves(body
, tcx
, param_env
) {
30 Ok(move_data
) => move_data
,
31 Err((move_data
, _
)) => {
32 tcx
.sess
.delay_span_bug(
// NOTE(review): the span argument of delay_span_bug (original line 33)
// is missing from this extraction.
34 "No `move_errors` should be allowed in MIR borrowck",
// Recover with an empty side table and the partial move data.
36 (Default
::default(), move_data
)
// NOTE(review): closing braces of the match (original lines 37-38) are missing.
39 let un_derefer
= UnDerefer { tcx: tcx, derefer_sidetable: side_table }
;
40 let elaborate_patch
= {
42 let env
= MoveDataParamEnv { move_data, param_env }
;
// Blocks whose unwind edge provably drops nothing — their unwind edges
// are treated as dead by the dataflow engines below.
43 let dead_unwinds
= find_dead_unwinds(tcx
, body
, &env
, &un_derefer
);
// Forward dataflow: which places are maybe-initialized at each point.
45 let inits
= MaybeInitializedPlaces
::new(tcx
, body
, &env
)
46 .into_engine(tcx
, body
)
47 .dead_unwinds(&dead_unwinds
)
48 .pass_name("elaborate_drops")
49 .iterate_to_fixpoint()
50 .into_results_cursor(body
);
// Forward dataflow: which places are maybe-uninitialized at each point.
52 let uninits
= MaybeUninitializedPlaces
::new(tcx
, body
, &env
)
53 .mark_inactive_variants_as_uninit()
54 .into_engine(tcx
, body
)
55 .dead_unwinds(&dead_unwinds
)
56 .pass_name("elaborate_drops")
57 .iterate_to_fixpoint()
58 .into_results_cursor(body
);
// NOTE(review): the ElaborateDropsCtxt struct-literal header and the
// tcx/body/env fields (original lines 59-63) are missing here; only the
// remaining field initializers survived the extraction.
64 init_data
: InitializationData { inits, uninits }
,
65 drop_flags
: Default
::default(),
66 patch
: MirPatch
::new(body
),
67 un_derefer
: un_derefer
,
// NOTE(review): the `.elaborate()` call closing the block (original
// lines 68-70) is missing.
71 elaborate_patch
.apply(body
);
// Re-run the deref separator so any derefs introduced by the patch
// stay in the canonical split form.
72 deref_finder(tcx
, body
);
76 /// Returns the set of basic blocks whose unwind edges are known
77 /// to not be reachable, because they are `drop` terminators
78 /// that can't drop anything.
// NOTE(review): the `tcx: TyCtxt<'tcx>` and `body: &Body<'tcx>` parameters
// (original lines 80-81) are missing from this extraction; the body clearly
// uses both.
79 fn find_dead_unwinds
<'tcx
>(
82 env
: &MoveDataParamEnv
<'tcx
>,
83 und
: &UnDerefer
<'tcx
>,
84 ) -> BitSet
<BasicBlock
> {
85 debug
!("find_dead_unwinds({:?})", body
.span
);
86 // We only need to do this pass once, because unwind edges can only
87 // reach cleanup blocks, which can't have unwind edges themselves.
88 let mut dead_unwinds
= BitSet
::new_empty(body
.basic_blocks
.len());
// A private maybe-init analysis (no dead_unwinds yet — this function is
// what computes them).
89 let mut flow_inits
= MaybeInitializedPlaces
::new(tcx
, body
, &env
)
90 .into_engine(tcx
, body
)
91 .pass_name("find_dead_unwinds")
92 .iterate_to_fixpoint()
93 .into_results_cursor(body
);
94 for (bb
, bb_data
) in body
.basic_blocks
.iter_enumerated() {
// Only Drop/DropAndReplace terminators that actually have an unwind
// edge are of interest; the dropped place is un-derefered first.
95 let place
= match bb_data
.terminator().kind
{
96 TerminatorKind
::Drop { ref place, unwind: Some(_), .. }
97 | TerminatorKind
::DropAndReplace { ref place, unwind: Some(_), .. }
=> {
98 und
.derefer(place
.as_ref(), body
).unwrap_or(*place
)
// NOTE(review): the fallthrough arm and match close (original lines
// 99-102) are missing.
103 debug
!("find_dead_unwinds @ {:?}: {:?}", bb
, bb_data
);
// Only exact move paths are considered; parented lookups are skipped.
105 let LookupResult
::Exact(path
) = env
.move_data
.rev_lookup
.find(place
.as_ref()) else {
106 debug
!("find_dead_unwinds: has parent; skipping");
// NOTE(review): the `continue`/block close (original lines 107-109)
// are missing.
110 flow_inits
.seek_before_primary_effect(body
.terminator_loc(bb
));
// NOTE(review): surrounding debug! call (original lines 111, 113-118)
// is missing; only the format string survived.
112 "find_dead_unwinds @ {:?}: path({:?})={:?}; init_data={:?}",
// The unwind edge is dead iff no drop-relevant child of `path` is
// maybe-initialized at the terminator.
119 let mut maybe_live
= false;
120 on_all_drop_children_bits(tcx
, body
, &env
, path
, |child
| {
121 maybe_live
|= flow_inits
.contains(child
);
124 debug
!("find_dead_unwinds @ {:?}: maybe_live={}", bb
, maybe_live
);
// NOTE(review): the `if !maybe_live` guard (original line 125) is missing.
126 dead_unwinds
.insert(bb
);
// Pair of dataflow cursors used to query, at any location, whether a move
// path is maybe-initialized and/or maybe-uninitialized.
// NOTE(review): closing brace (original lines 136-137) is missing from this
// extraction.
133 struct InitializationData
<'mir
, 'tcx
> {
// Cursor over the maybe-initialized-places analysis.
134 inits
: ResultsCursor
<'mir
, 'tcx
, MaybeInitializedPlaces
<'mir
, 'tcx
>>,
// Cursor over the maybe-uninitialized-places analysis.
135 uninits
: ResultsCursor
<'mir
, 'tcx
, MaybeUninitializedPlaces
<'mir
, 'tcx
>>,
138 impl InitializationData
<'_
, '_
> {
// Positions both cursors just before the primary effect at `loc`, so
// subsequent `maybe_live_dead` queries reflect the state at that point.
139 fn seek_before(&mut self, loc
: Location
) {
140 self.inits
.seek_before_primary_effect(loc
);
141 self.uninits
.seek_before_primary_effect(loc
);
// Returns (maybe_live, maybe_dead) for `path` at the currently-seeked
// location: membership in the init and uninit sets respectively.
144 fn maybe_live_dead(&self, path
: MovePathIndex
) -> (bool
, bool
) {
145 (self.inits
.contains(path
), self.uninits
.contains(path
))
// Thin adapter that lets `ElaborateDropsCtxt` implement the
// `DropElaborator` callback trait (see the impl below).
// NOTE(review): closing brace (original line 151) is missing.
149 struct Elaborator
<'a
, 'b
, 'tcx
> {
150 ctxt
: &'a
mut ElaborateDropsCtxt
<'b
, 'tcx
>,
// No-op Debug impl required by the `DropElaborator` trait bound; the
// formatter parameter is deliberately unused.
// NOTE(review): the `fmt` body (original lines 155-157, presumably `Ok(())`)
// is missing from this extraction.
153 impl fmt
::Debug
for Elaborator
<'_
, '_
, '_
> {
154 fn fmt(&self, _f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
// Callback surface used by `elaborate_drop` to query drop-flag state and
// move-path structure while it builds the drop ladder.
// NOTE(review): several method bodies in this impl have lines missing
// (gaps in the embedded original numbering); code left byte-identical.
159 impl<'a
, 'tcx
> DropElaborator
<'a
, 'tcx
> for Elaborator
<'a
, '_
, 'tcx
> {
// Paths are identified by their move-path index.
160 type Path
= MovePathIndex
;
// Accessors forwarding to the wrapped context (bodies on missing lines).
162 fn patch(&mut self) -> &mut MirPatch
<'tcx
> {
166 fn body(&self) -> &'a Body
<'tcx
> {
170 fn tcx(&self) -> TyCtxt
<'tcx
> {
174 fn param_env(&self) -> ty
::ParamEnv
<'tcx
> {
175 self.ctxt
.param_env()
// Classifies how a drop of `path` must be emitted, from the dataflow
// state: Dead (never live), Static (always live), Conditional (flagged),
// or Open (flagged per-field, multiple drop-relevant children).
178 fn drop_style(&self, path
: Self::Path
, mode
: DropFlagMode
) -> DropStyle
{
179 let ((maybe_live
, maybe_dead
), multipart
) = match mode
{
180 DropFlagMode
::Shallow
=> (self.ctxt
.init_data
.maybe_live_dead(path
), false),
181 DropFlagMode
::Deep
=> {
// Aggregate liveness over all drop-relevant children.
182 let mut some_live
= false;
183 let mut some_dead
= false;
184 let mut children_count
= 0;
185 on_all_drop_children_bits(self.tcx(), self.body(), self.ctxt
.env
, path
, |child
| {
186 let (live
, dead
) = self.ctxt
.init_data
.maybe_live_dead(child
);
187 debug
!("elaborate_drop: state({:?}) = {:?}", child
, (live
, dead
));
// NOTE(review): the accumulator updates (original lines 188-191) are
// missing from this extraction.
192 ((some_live
, some_dead
), children_count
!= 1)
195 match (maybe_live
, maybe_dead
, multipart
) {
196 (false, _
, _
) => DropStyle
::Dead
,
197 (true, false, _
) => DropStyle
::Static
,
198 (true, true, false) => DropStyle
::Conditional
,
199 (true, true, true) => DropStyle
::Open
,
// Clears the drop flag(s) for `path` at `loc`: just the path itself for
// Shallow, or every child path for Deep.
203 fn clear_drop_flag(&mut self, loc
: Location
, path
: Self::Path
, mode
: DropFlagMode
) {
205 DropFlagMode
::Shallow
=> {
206 self.ctxt
.set_drop_flag(loc
, path
, DropFlagState
::Absent
);
208 DropFlagMode
::Deep
=> {
209 on_all_children_bits(
212 self.ctxt
.move_data(),
214 |child
| self.ctxt
.set_drop_flag(loc
, child
, DropFlagState
::Absent
),
// Move-path projection helpers: each finds the child of `path` whose
// projection element matches the requested field/index/deref/variant.
220 fn field_subpath(&self, path
: Self::Path
, field
: Field
) -> Option
<Self::Path
> {
221 rustc_mir_dataflow
::move_path_children_matching(self.ctxt
.move_data(), path
, |e
| match e
{
222 ProjectionElem
::Field(idx
, _
) => idx
== field
,
227 fn array_subpath(&self, path
: Self::Path
, index
: u64, size
: u64) -> Option
<Self::Path
> {
228 rustc_mir_dataflow
::move_path_children_matching(self.ctxt
.move_data(), path
, |e
| match e
{
229 ProjectionElem
::ConstantIndex { offset, min_length, from_end }
=> {
230 debug_assert
!(size
== min_length
, "min_length should be exact for arrays");
231 assert
!(!from_end
, "from_end should not be used for array element ConstantIndex");
// NOTE(review): the `offset == index` comparison and fallthrough arm
// (original lines 232-236) are missing from this extraction.
238 fn deref_subpath(&self, path
: Self::Path
) -> Option
<Self::Path
> {
239 rustc_mir_dataflow
::move_path_children_matching(self.ctxt
.move_data(), path
, |e
| {
240 e
== ProjectionElem
::Deref
244 fn downcast_subpath(&self, path
: Self::Path
, variant
: VariantIdx
) -> Option
<Self::Path
> {
245 rustc_mir_dataflow
::move_path_children_matching(self.ctxt
.move_data(), path
, |e
| match e
{
246 ProjectionElem
::Downcast(_
, idx
) => idx
== variant
,
// Returns the drop flag for `path` as a copy operand, if one was created.
251 fn get_drop_flag(&mut self, path
: Self::Path
) -> Option
<Operand
<'tcx
>> {
252 self.ctxt
.drop_flag(path
).map(Operand
::Copy
)
// State carried through the elaboration: the body being patched, the move
// data + dataflow results, the drop flags allocated so far, and the patch
// accumulating the rewrites.
// NOTE(review): the `tcx` field (original line 257) and the closing brace
// (line 264) are missing from this extraction; methods below use `self.tcx`.
256 struct ElaborateDropsCtxt
<'a
, 'tcx
> {
// The MIR body being elaborated (read-only; edits go through `patch`).
258 body
: &'a Body
<'tcx
>,
// Move data and param env shared with the dataflow analyses.
259 env
: &'a MoveDataParamEnv
<'tcx
>,
// Cursors over the maybe-init / maybe-uninit analyses.
260 init_data
: InitializationData
<'a
, 'tcx
>,
// Map from move path to the local holding its boolean drop flag.
261 drop_flags
: FxHashMap
<MovePathIndex
, Local
>,
// Accumulated MIR edits, applied at the end of the pass.
262 patch
: MirPatch
<'tcx
>,
// Rewrites deref-temp chains back to the original places.
263 un_derefer
: UnDerefer
<'tcx
>,
// NOTE(review): this impl is truncated at the end of the visible chunk
// (the tail of `drop_flags_for_locs` past original line 604 is missing) and
// has interior lines missing wherever the embedded numbering jumps.
// Code left byte-identical; only comments are added.
266 impl<'b
, 'tcx
> ElaborateDropsCtxt
<'b
, 'tcx
> {
// Accessors (bodies on missing lines).
267 fn move_data(&self) -> &'b MoveData
<'tcx
> {
271 fn param_env(&self) -> ty
::ParamEnv
<'tcx
> {
// Allocates (once per move path) an internal `bool` local to act as the
// drop flag for `index`.
275 fn create_drop_flag(&mut self, index
: MovePathIndex
, span
: Span
) {
// Borrow the patch separately so the entry closure can use it while
// `self.drop_flags` is mutably borrowed.
277 let patch
= &mut self.patch
;
278 debug
!("create_drop_flag({:?})", self.body
.span
);
279 self.drop_flags
.entry(index
).or_insert_with(|| patch
.new_internal(tcx
.types
.bool
, span
));
// Returns the drop-flag place for `index`, if a flag was created.
282 fn drop_flag(&mut self, index
: MovePathIndex
) -> Option
<Place
<'tcx
>> {
283 self.drop_flags
.get(&index
).map(|t
| Place
::from(*t
))
286 /// create a patch that elaborates all drops in the input
288 fn elaborate(mut self) -> MirPatch
<'tcx
> {
// Phase 1: decide which move paths need flags; phase 2: rewrite the
// drop terminators; then emit all flag assignments.
289 self.collect_drop_flags();
291 self.elaborate_drops();
293 self.drop_flags_on_init();
294 self.drop_flags_for_fn_rets();
295 self.drop_flags_for_args();
296 self.drop_flags_for_locs();
// NOTE(review): the `self.patch` return (original lines 297-299) is missing.
// Walks every Drop/DropAndReplace terminator and creates a drop flag for
// each drop-relevant child path that is both maybe-live and maybe-dead
// at the drop (i.e. conditionally initialized).
301 fn collect_drop_flags(&mut self) {
302 for (bb
, data
) in self.body
.basic_blocks
.iter_enumerated() {
303 let terminator
= data
.terminator();
304 let place
= match terminator
.kind
{
305 TerminatorKind
::Drop { ref place, .. }
306 | TerminatorKind
::DropAndReplace { ref place, .. }
=> {
307 self.un_derefer
.derefer(place
.as_ref(), self.body
).unwrap_or(*place
)
// NOTE(review): the fallthrough `continue` arm (original lines 308-311)
// is missing.
312 self.init_data
.seek_before(self.body
.terminator_loc(bb
));
314 let path
= self.move_data().rev_lookup
.find(place
.as_ref());
315 debug
!("collect_drop_flags: {:?}, place {:?} ({:?})", bb
, place
, path
);
317 let path
= match path
{
318 LookupResult
::Exact(e
) => e
,
319 LookupResult
::Parent(None
) => continue,
320 LookupResult
::Parent(Some(parent
)) => {
// Untracked drop: only legal if the parent is definitely
// initialized (or the place is a deref temp).
321 let (_maybe_live
, maybe_dead
) = self.init_data
.maybe_live_dead(parent
);
323 if self.body
.local_decls
[place
.local
].is_deref_temp() {
// NOTE(review): the body of this guard and the `if maybe_dead`
// check (original lines 324-327) are missing.
328 self.tcx
.sess
.delay_span_bug(
329 terminator
.source_info
.span
,
// NOTE(review): the format! wrapper around this message (original
// lines 330, 332-339) is missing.
331 "drop of untracked, uninitialized value {:?}, place {:?} ({:?})",
// Flag every drop-relevant child that is conditionally initialized.
340 on_all_drop_children_bits(self.tcx
, self.body
, self.env
, path
, |child
| {
341 let (maybe_live
, maybe_dead
) = self.init_data
.maybe_live_dead(child
);
// NOTE(review): surrounding debug! call (original lines 342, 344-346)
// is missing; only the format string survived.
343 "collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
347 (maybe_live
, maybe_dead
)
349 if maybe_live
&& maybe_dead
{
350 self.create_drop_flag(child
, terminator
.source_info
.span
)
// Rewrites each Drop/DropAndReplace terminator according to the dataflow
// state, delegating the ladder construction to `elaborate_drop`.
356 fn elaborate_drops(&mut self) {
357 for (bb
, data
) in self.body
.basic_blocks
.iter_enumerated() {
// Terminator location = index one past the last statement.
358 let loc
= Location { block: bb, statement_index: data.statements.len() }
;
359 let terminator
= data
.terminator();
361 let resume_block
= self.patch
.resume_block();
362 match terminator
.kind
{
363 TerminatorKind
::Drop { mut place, target, unwind }
=> {
// Undo deref-temp chains before looking the place up.
364 if let Some(new_place
) = self.un_derefer
.derefer(place
.as_ref(), self.body
) {
// NOTE(review): the `place = new_place` assignment (original
// lines 365-366) is missing.
368 self.init_data
.seek_before(loc
);
369 match self.move_data().rev_lookup
.find(place
.as_ref()) {
370 LookupResult
::Exact(path
) => elaborate_drop(
371 &mut Elaborator { ctxt: self }
,
372 terminator
.source_info
,
// NOTE(review): place/path/succ arguments (original lines 373-378)
// are missing.
379 Unwind
::To(Option
::unwrap_or(unwind
, resume_block
))
// An exact path is required for a tracked drop; anything else is a
// (delayed) compiler bug.
383 LookupResult
::Parent(..) => {
384 self.tcx
.sess
.delay_span_bug(
385 terminator
.source_info
.span
,
386 &format
!("drop of untracked value {:?}", bb
),
391 TerminatorKind
::DropAndReplace { mut place, ref value, target, unwind }
=> {
// DropAndReplace never appears on the unwind path.
392 assert
!(!data
.is_cleanup
);
394 if let Some(new_place
) = self.un_derefer
.derefer(place
.as_ref(), self.body
) {
// NOTE(review): the `place = new_place` assignment (original
// lines 395-396) is missing.
397 self.elaborate_replace(loc
, place
, value
, target
, unwind
);
404 /// Elaborate a MIR `replace` terminator. This instruction
405 /// is not directly handled by codegen, and therefore
406 /// must be desugared.
408 /// The desugaring drops the location if needed, and then writes
409 /// the value (including setting the drop flag) over it in *both* arms.
411 /// The `replace` terminator can also be called on places that
412 /// are not tracked by elaboration (for example,
413 /// `replace x[i] <- tmp0`). The borrow checker requires that
414 /// these locations are initialized before the assignment,
415 /// so we just generate an unconditional drop.
// NOTE(review): the loc/place/target parameters (original lines 417-419,
// 421) are missing from this signature in the extraction.
416 fn elaborate_replace(
420 value
: &Operand
<'tcx
>,
422 unwind
: Option
<BasicBlock
>,
425 let data
= &self.body
[bb
];
426 let terminator
= data
.terminator();
427 assert
!(!data
.is_cleanup
, "DropAndReplace in unwind path not supported");
// The assignment that writes `value` over `place`, emitted in both the
// normal and the unwind continuation blocks.
429 let assign
= Statement
{
430 kind
: StatementKind
::Assign(Box
::new((place
, Rvalue
::Use(value
.clone())))),
431 source_info
: terminator
.source_info
,
// Unwind continuation: assign, then goto the (possibly synthesized)
// unwind target.
434 let unwind
= unwind
.unwrap_or_else(|| self.patch
.resume_block());
435 let unwind
= self.patch
.new_block(BasicBlockData
{
436 statements
: vec
![assign
.clone()],
437 terminator
: Some(Terminator
{
438 kind
: TerminatorKind
::Goto { target: unwind }
,
// Normal continuation: assign, then goto the original target.
444 let target
= self.patch
.new_block(BasicBlockData
{
445 statements
: vec
![assign
],
446 terminator
: Some(Terminator { kind: TerminatorKind::Goto { target }
, ..*terminator
}),
450 match self.move_data().rev_lookup
.find(place
.as_ref()) {
451 LookupResult
::Exact(path
) => {
// Tracked place: full elaborated drop, then mark the path (and
// children) initialized in both continuation blocks.
452 debug
!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator
, path
);
453 self.init_data
.seek_before(loc
);
// NOTE(review): the `elaborate_drop(` call head (original line 454)
// is missing.
455 &mut Elaborator { ctxt: self }
,
456 terminator
.source_info
,
// NOTE(review): remaining elaborate_drop arguments (original lines
// 457-462) are missing.
463 on_all_children_bits(self.tcx
, self.body
, self.move_data(), path
, |child
| {
// NOTE(review): the set_drop_flag call heads (original lines 464,
// 466, 468-469, 471) are missing around these argument fragments.
465 Location { block: target, statement_index: 0 }
,
467 DropFlagState
::Present
,
470 Location { block: unwind, statement_index: 0 }
,
472 DropFlagState
::Present
,
476 LookupResult
::Parent(parent
) => {
477 // drop and replace behind a pointer/array/whatever. The location
478 // must be initialized.
479 debug
!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator
, parent
);
480 self.patch
.patch_terminator(
482 TerminatorKind
::Drop { place, target, unwind: Some(unwind) }
,
// Builds a `bool` constant rvalue used for drop-flag assignments.
488 fn constant_bool(&self, span
: Span
, val
: bool
) -> Rvalue
<'tcx
> {
489 Rvalue
::Use(Operand
::Constant(Box
::new(Constant
{
// NOTE(review): the span/user_ty fields (original lines 490-491) are
// missing.
492 literal
: ConstantKind
::from_bool(self.tcx
, val
),
// Emits `flag = <val>` at `loc` if `path` has a drop flag; no-op otherwise.
496 fn set_drop_flag(&mut self, loc
: Location
, path
: MovePathIndex
, val
: DropFlagState
) {
497 if let Some(&flag
) = self.drop_flags
.get(&path
) {
498 let span
= self.patch
.source_info_for_location(self.body
, loc
).span
;
499 let val
= self.constant_bool(span
, val
.value());
500 self.patch
.add_assign(loc
, Place
::from(flag
), val
);
// Initializes every drop flag to `false` at function entry.
504 fn drop_flags_on_init(&mut self) {
505 let loc
= Location
::START
;
506 let span
= self.patch
.source_info_for_location(self.body
, loc
).span
;
507 let false_
= self.constant_bool(span
, false);
508 for flag
in self.drop_flags
.values() {
509 self.patch
.add_assign(loc
, Place
::from(*flag
), false_
.clone());
// Marks call destinations as initialized at the start of the success
// block, for calls that also have a cleanup edge.
513 fn drop_flags_for_fn_rets(&mut self) {
514 for (bb
, data
) in self.body
.basic_blocks
.iter_enumerated() {
515 if let TerminatorKind
::Call
{
516 destination
, target
: Some(tgt
), cleanup
: Some(_
), ..
517 } = data
.terminator().kind
519 assert
!(!self.patch
.is_patched(bb
));
521 let loc
= Location { block: tgt, statement_index: 0 }
;
522 let path
= self.move_data().rev_lookup
.find(destination
.as_ref());
523 on_lookup_result_bits(self.tcx
, self.body
, self.move_data(), path
, |child
| {
524 self.set_drop_flag(loc
, child
, DropFlagState
::Present
)
// Marks function arguments as initialized at entry.
530 fn drop_flags_for_args(&mut self) {
531 let loc
= Location
::START
;
532 rustc_mir_dataflow
::drop_flag_effects_for_function_entry(
// NOTE(review): the tcx/body/env arguments and closure head (original
// lines 533-536) are missing.
537 self.set_drop_flag(loc
, path
, ds
);
// Applies per-location drop-flag effects (moves/initializations) at every
// statement and terminator of the pre-existing blocks.
542 fn drop_flags_for_locs(&mut self) {
543 // We intentionally iterate only over the *old* basic blocks.
545 // Basic blocks created by drop elaboration update their
546 // drop flags by themselves, to avoid the drop flags being
547 // clobbered before they are read.
549 for (bb
, data
) in self.body
.basic_blocks
.iter_enumerated() {
550 debug
!("drop_flags_for_locs({:?})", data
);
551 for i
in 0..(data
.statements
.len() + 1) {
552 debug
!("drop_flag_for_locs: stmt {}", i
);
553 let mut allow_initializations
= true;
// At the terminator position, some terminator kinds manage their
// own flag updates.
554 if i
== data
.statements
.len() {
555 match data
.terminator().kind
{
556 TerminatorKind
::Drop { .. }
=> {
557 // drop elaboration should handle that by itself
// NOTE(review): the `continue` (original lines 558-559) is missing.
560 TerminatorKind
::DropAndReplace { .. }
=> {
561 // this contains the move of the source and
562 // the initialization of the destination. We
563 // only want the former - the latter is handled
564 // by the elaboration code and must be done
565 // *after* the destination is dropped.
566 assert
!(self.patch
.is_patched(bb
));
567 allow_initializations
= false;
569 TerminatorKind
::Resume
=> {
570 // It is possible for `Resume` to be patched
571 // (in particular it can be patched to be replaced with
572 // a Goto; see `MirPatch::new`).
// NOTE(review): the fallthrough arm head (original lines 573-574)
// is missing before this assert.
575 assert
!(!self.patch
.is_patched(bb
));
579 let loc
= Location { block: bb, statement_index: i }
;
580 rustc_mir_dataflow
::drop_flag_effects_for_location(
// NOTE(review): the tcx/body/env/loc arguments and closure head
// (original lines 581-585) are missing.
586 if ds
== DropFlagState
::Absent
|| allow_initializations
{
587 self.set_drop_flag(loc
, path
, ds
)
593 // There may be a critical edge after this call,
594 // so mark the return as initialized *before* the
// NOTE(review): the rest of this comment (original line 595) is missing.
596 if let TerminatorKind
::Call { destination, target: Some(_), cleanup: None, .. }
=
597 data
.terminator().kind
599 assert
!(!self.patch
.is_patched(bb
));
601 let loc
= Location { block: bb, statement_index: data.statements.len() }
;
602 let path
= self.move_data().rev_lookup
.find(destination
.as_ref());
603 on_lookup_result_bits(self.tcx
, self.body
, self.move_data(), path
, |child
| {
604 self.set_drop_flag(loc
, child
, DropFlagState
::Present
)