// compiler/rustc_mir_transform/src/elaborate_drops.rs (rustc 1.65.0)
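//! The drop-elaboration pass. "Elaboration" replaces the `Drop` and
//! `DropAndReplace` terminators produced by MIR building with drops that are
//! guarded, where needed, by boolean "drop flags" tracking whether a place is
//! still initialized at runtime. Dataflow over `MaybeInitializedPlaces` and
//! `MaybeUninitializedPlaces` decides, per drop, whether it can be removed,
//! kept unconditional, or must be made conditional on a flag.
//!
//! Roughly, for a conditionally initialized local (an illustrative sketch,
//! not actual compiler output; `_x_init` is a hypothetical flag name):
//!
//! ```ignore (illustrative)
//! let x: String;
//! if cond {
//!     x = String::from("hi"); // elaboration would set `_x_init = true` here
//! }
//! // At scope end the implicit drop becomes conditional, roughly:
//! // if _x_init { drop(x) }
//! ```
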
use crate::deref_separator::deref_finder;
use crate::MirPass;
use rustc_data_structures::fx::FxHashMap;
use rustc_index::bit_set::BitSet;
use rustc_middle::mir::patch::MirPatch;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, TyCtxt};
use rustc_mir_dataflow::elaborate_drops::{elaborate_drop, DropFlagState, Unwind};
use rustc_mir_dataflow::elaborate_drops::{DropElaborator, DropFlagMode, DropStyle};
use rustc_mir_dataflow::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
use rustc_mir_dataflow::move_paths::{LookupResult, MoveData, MovePathIndex};
use rustc_mir_dataflow::on_lookup_result_bits;
use rustc_mir_dataflow::un_derefer::UnDerefer;
use rustc_mir_dataflow::MoveDataParamEnv;
use rustc_mir_dataflow::{on_all_children_bits, on_all_drop_children_bits};
use rustc_mir_dataflow::{Analysis, ResultsCursor};
use rustc_span::Span;
use rustc_target::abi::VariantIdx;
use std::fmt;

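/// The MIR pass that performs drop elaboration; see the module docs above.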
pub struct ElaborateDrops;

impl<'tcx> MirPass<'tcx> for ElaborateDrops {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!("elaborate_drops({:?} @ {:?})", body.source, body.span);

        let def_id = body.source.def_id();
        let param_env = tcx.param_env_reveal_all_normalized(def_id);
        let (side_table, move_data) = match MoveData::gather_moves(body, tcx, param_env) {
            Ok(move_data) => move_data,
            Err((move_data, _)) => {
                tcx.sess.delay_span_bug(
                    body.span,
                    "No `move_errors` should be allowed in MIR borrowck",
                );
                (Default::default(), move_data)
            }
        };
        let un_derefer = UnDerefer { tcx, derefer_sidetable: side_table };
        let elaborate_patch = {
            let body = &*body;
            let env = MoveDataParamEnv { move_data, param_env };
            let dead_unwinds = find_dead_unwinds(tcx, body, &env, &un_derefer);

            let inits = MaybeInitializedPlaces::new(tcx, body, &env)
                .into_engine(tcx, body)
                .dead_unwinds(&dead_unwinds)
                .pass_name("elaborate_drops")
                .iterate_to_fixpoint()
                .into_results_cursor(body);

            let uninits = MaybeUninitializedPlaces::new(tcx, body, &env)
                .mark_inactive_variants_as_uninit()
                .into_engine(tcx, body)
                .dead_unwinds(&dead_unwinds)
                .pass_name("elaborate_drops")
                .iterate_to_fixpoint()
                .into_results_cursor(body);

            ElaborateDropsCtxt {
                tcx,
                body,
                env: &env,
                init_data: InitializationData { inits, uninits },
                drop_flags: Default::default(),
                patch: MirPatch::new(body),
                un_derefer,
            }
            .elaborate()
        };
        elaborate_patch.apply(body);
        deref_finder(tcx, body);
    }
}

/// Returns the set of basic blocks whose unwind edges are known
/// to not be reachable, because they are `drop` terminators
/// that can't drop anything.
fn find_dead_unwinds<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &Body<'tcx>,
    env: &MoveDataParamEnv<'tcx>,
    und: &UnDerefer<'tcx>,
) -> BitSet<BasicBlock> {
    debug!("find_dead_unwinds({:?})", body.span);
    // We only need to do this pass once, because unwind edges can only
    // reach cleanup blocks, which can't have unwind edges themselves.
    let mut dead_unwinds = BitSet::new_empty(body.basic_blocks.len());
    let mut flow_inits = MaybeInitializedPlaces::new(tcx, body, &env)
        .into_engine(tcx, body)
        .pass_name("find_dead_unwinds")
        .iterate_to_fixpoint()
        .into_results_cursor(body);
    for (bb, bb_data) in body.basic_blocks.iter_enumerated() {
        let place = match bb_data.terminator().kind {
            TerminatorKind::Drop { ref place, unwind: Some(_), .. }
            | TerminatorKind::DropAndReplace { ref place, unwind: Some(_), .. } => {
                und.derefer(place.as_ref(), body).unwrap_or(*place)
            }
            _ => continue,
        };

        debug!("find_dead_unwinds @ {:?}: {:?}", bb, bb_data);

        let LookupResult::Exact(path) = env.move_data.rev_lookup.find(place.as_ref()) else {
            debug!("find_dead_unwinds: has parent; skipping");
            continue;
        };

        flow_inits.seek_before_primary_effect(body.terminator_loc(bb));
        debug!(
            "find_dead_unwinds @ {:?}: path({:?})={:?}; init_data={:?}",
            bb,
            place,
            path,
            flow_inits.get()
        );

        let mut maybe_live = false;
        on_all_drop_children_bits(tcx, body, &env, path, |child| {
            maybe_live |= flow_inits.contains(child);
        });

        debug!("find_dead_unwinds @ {:?}: maybe_live={}", bb, maybe_live);
        if !maybe_live {
            dead_unwinds.insert(bb);
        }
    }

    dead_unwinds
}

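/// A pair of dataflow cursors, kept in sync, answering "might this move path
/// be initialized?" and "might it be uninitialized?" at a given location.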
struct InitializationData<'mir, 'tcx> {
    inits: ResultsCursor<'mir, 'tcx, MaybeInitializedPlaces<'mir, 'tcx>>,
    uninits: ResultsCursor<'mir, 'tcx, MaybeUninitializedPlaces<'mir, 'tcx>>,
}

impl InitializationData<'_, '_> {
    fn seek_before(&mut self, loc: Location) {
        self.inits.seek_before_primary_effect(loc);
        self.uninits.seek_before_primary_effect(loc);
    }

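    /// Returns `(maybe_live, maybe_dead)` for `path` at the location last
    /// passed to `seek_before`: whether the place may still be initialized
    /// and whether it may already be uninitialized there.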
    fn maybe_live_dead(&self, path: MovePathIndex) -> (bool, bool) {
        (self.inits.contains(path), self.uninits.contains(path))
    }
}

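/// Adapter that lets `ElaborateDropsCtxt` drive the generic drop-building
/// code in `rustc_mir_dataflow::elaborate_drops` via the `DropElaborator`
/// trait.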
struct Elaborator<'a, 'b, 'tcx> {
    ctxt: &'a mut ElaborateDropsCtxt<'b, 'tcx>,
}

impl fmt::Debug for Elaborator<'_, '_, '_> {
    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
        Ok(())
    }
}

impl<'a, 'tcx> DropElaborator<'a, 'tcx> for Elaborator<'a, '_, 'tcx> {
    type Path = MovePathIndex;

    fn patch(&mut self) -> &mut MirPatch<'tcx> {
        &mut self.ctxt.patch
    }

    fn body(&self) -> &'a Body<'tcx> {
        self.ctxt.body
    }

    fn tcx(&self) -> TyCtxt<'tcx> {
        self.ctxt.tcx
    }

    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.ctxt.param_env()
    }

    fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle {
        let ((maybe_live, maybe_dead), multipart) = match mode {
            DropFlagMode::Shallow => (self.ctxt.init_data.maybe_live_dead(path), false),
            DropFlagMode::Deep => {
                let mut some_live = false;
                let mut some_dead = false;
                let mut children_count = 0;
                on_all_drop_children_bits(self.tcx(), self.body(), self.ctxt.env, path, |child| {
                    let (live, dead) = self.ctxt.init_data.maybe_live_dead(child);
                    debug!("elaborate_drop: state({:?}) = {:?}", child, (live, dead));
                    some_live |= live;
                    some_dead |= dead;
                    children_count += 1;
                });
                ((some_live, some_dead), children_count != 1)
            }
        };
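        // Map the (may be initialized, may be uninitialized, has several
        // independently tracked children) triple onto a drop style: never
        // initialized means no drop at all; always initialized means a plain
        // static drop; otherwise the drop must consult drop flags, either a
        // single flag (Conditional) or per-child flags (Open).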
        match (maybe_live, maybe_dead, multipart) {
            (false, _, _) => DropStyle::Dead,
            (true, false, _) => DropStyle::Static,
            (true, true, false) => DropStyle::Conditional,
            (true, true, true) => DropStyle::Open,
        }
    }

    fn clear_drop_flag(&mut self, loc: Location, path: Self::Path, mode: DropFlagMode) {
        match mode {
            DropFlagMode::Shallow => {
                self.ctxt.set_drop_flag(loc, path, DropFlagState::Absent);
            }
            DropFlagMode::Deep => {
                on_all_children_bits(
                    self.tcx(),
                    self.body(),
                    self.ctxt.move_data(),
                    path,
                    |child| self.ctxt.set_drop_flag(loc, child, DropFlagState::Absent),
                );
            }
        }
    }

    fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| match e {
            ProjectionElem::Field(idx, _) => idx == field,
            _ => false,
        })
    }

    fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| match e {
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                debug_assert!(size == min_length, "min_length should be exact for arrays");
                assert!(!from_end, "from_end should not be used for array element ConstantIndex");
                offset == index
            }
            _ => false,
        })
    }

    fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| {
            e == ProjectionElem::Deref
        })
    }

    fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path> {
        rustc_mir_dataflow::move_path_children_matching(self.ctxt.move_data(), path, |e| match e {
            ProjectionElem::Downcast(_, idx) => idx == variant,
            _ => false,
        })
    }

    fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>> {
        self.ctxt.drop_flag(path).map(Operand::Copy)
    }
}

struct ElaborateDropsCtxt<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,
    env: &'a MoveDataParamEnv<'tcx>,
    init_data: InitializationData<'a, 'tcx>,
    drop_flags: FxHashMap<MovePathIndex, Local>,
    patch: MirPatch<'tcx>,
    un_derefer: UnDerefer<'tcx>,
}

impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
    fn move_data(&self) -> &'b MoveData<'tcx> {
        &self.env.move_data
    }

    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.env.param_env
    }

    fn create_drop_flag(&mut self, index: MovePathIndex, span: Span) {
        let tcx = self.tcx;
        let patch = &mut self.patch;
        debug!("create_drop_flag({:?})", self.body.span);
        self.drop_flags.entry(index).or_insert_with(|| patch.new_internal(tcx.types.bool, span));
    }

    fn drop_flag(&mut self, index: MovePathIndex) -> Option<Place<'tcx>> {
        self.drop_flags.get(&index).map(|t| Place::from(*t))
    }

    /// create a patch that elaborates all drops in the input
    /// MIR.
    fn elaborate(mut self) -> MirPatch<'tcx> {
        self.collect_drop_flags();

        self.elaborate_drops();

        self.drop_flags_on_init();
        self.drop_flags_for_fn_rets();
        self.drop_flags_for_args();
        self.drop_flags_for_locs();

        self.patch
    }

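    /// Walks every `Drop`/`DropAndReplace` terminator and allocates a boolean
    /// drop flag for each dropped move path that the dataflow says may be
    /// either initialized or uninitialized at the drop, i.e. whose drop must
    /// be conditional at runtime.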
    fn collect_drop_flags(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            let terminator = data.terminator();
            let place = match terminator.kind {
                TerminatorKind::Drop { ref place, .. }
                | TerminatorKind::DropAndReplace { ref place, .. } => {
                    self.un_derefer.derefer(place.as_ref(), self.body).unwrap_or(*place)
                }
                _ => continue,
            };

            self.init_data.seek_before(self.body.terminator_loc(bb));

            let path = self.move_data().rev_lookup.find(place.as_ref());
            debug!("collect_drop_flags: {:?}, place {:?} ({:?})", bb, place, path);

            let path = match path {
                LookupResult::Exact(e) => e,
                LookupResult::Parent(None) => continue,
                LookupResult::Parent(Some(parent)) => {
                    let (_maybe_live, maybe_dead) = self.init_data.maybe_live_dead(parent);

                    if self.body.local_decls[place.local].is_deref_temp() {
                        continue;
                    }

                    if maybe_dead {
                        self.tcx.sess.delay_span_bug(
                            terminator.source_info.span,
                            &format!(
                                "drop of untracked, uninitialized value {:?}, place {:?} ({:?})",
                                bb, place, path
                            ),
                        );
                    }
                    continue;
                }
            };

            on_all_drop_children_bits(self.tcx, self.body, self.env, path, |child| {
                let (maybe_live, maybe_dead) = self.init_data.maybe_live_dead(child);
                debug!(
                    "collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
                    child,
                    place,
                    path,
                    (maybe_live, maybe_dead)
                );
                if maybe_live && maybe_dead {
                    self.create_drop_flag(child, terminator.source_info.span)
                }
            });
        }
    }

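    /// Rewrites each `Drop` and `DropAndReplace` terminator into its
    /// elaborated form, using the initialization data at that location to
    /// pick the drop style.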
    fn elaborate_drops(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            let loc = Location { block: bb, statement_index: data.statements.len() };
            let terminator = data.terminator();

            let resume_block = self.patch.resume_block();
            match terminator.kind {
                TerminatorKind::Drop { mut place, target, unwind } => {
                    if let Some(new_place) = self.un_derefer.derefer(place.as_ref(), self.body) {
                        place = new_place;
                    }

                    self.init_data.seek_before(loc);
                    match self.move_data().rev_lookup.find(place.as_ref()) {
                        LookupResult::Exact(path) => elaborate_drop(
                            &mut Elaborator { ctxt: self },
                            terminator.source_info,
                            place,
                            path,
                            target,
                            if data.is_cleanup {
                                Unwind::InCleanup
                            } else {
                                Unwind::To(unwind.unwrap_or(resume_block))
                            },
                            bb,
                        ),
                        LookupResult::Parent(..) => {
                            self.tcx.sess.delay_span_bug(
                                terminator.source_info.span,
                                &format!("drop of untracked value {:?}", bb),
                            );
                        }
                    }
                }
                TerminatorKind::DropAndReplace { mut place, ref value, target, unwind } => {
                    assert!(!data.is_cleanup);

                    if let Some(new_place) = self.un_derefer.derefer(place.as_ref(), self.body) {
                        place = new_place;
                    }
                    self.elaborate_replace(loc, place, value, target, unwind);
                }
                _ => continue,
            }
        }
    }

    /// Elaborate a MIR `replace` terminator. This instruction
    /// is not directly handled by codegen, and therefore
    /// must be desugared.
    ///
    /// The desugaring drops the location if needed, and then writes
    /// the value (including setting the drop flag) over it in *both* arms.
    ///
    /// The `replace` terminator can also be called on places that
    /// are not tracked by elaboration (for example,
    /// `replace x[i] <- tmp0`). The borrow checker requires that
    /// these locations are initialized before the assignment,
    /// so we just generate an unconditional drop.
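    ///
    /// Sketch of the desugaring for a tracked place (illustrative only, the
    /// block names are made up): `replace(PLACE <- VALUE) -> [return: bb_ok,
    /// unwind: bb_err]` becomes an elaborated drop of `PLACE` whose success
    /// edge goes to a new block `{ PLACE = VALUE; goto bb_ok }` and whose
    /// unwind edge goes to a new cleanup block `{ PLACE = VALUE; goto bb_err }`,
    /// with the drop flags for `PLACE` set to "present" at the start of both
    /// new blocks.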
    fn elaborate_replace(
        &mut self,
        loc: Location,
        place: Place<'tcx>,
        value: &Operand<'tcx>,
        target: BasicBlock,
        unwind: Option<BasicBlock>,
    ) {
        let bb = loc.block;
        let data = &self.body[bb];
        let terminator = data.terminator();
        assert!(!data.is_cleanup, "DropAndReplace in unwind path not supported");

        let assign = Statement {
            kind: StatementKind::Assign(Box::new((place, Rvalue::Use(value.clone())))),
            source_info: terminator.source_info,
        };

        let unwind = unwind.unwrap_or_else(|| self.patch.resume_block());
        let unwind = self.patch.new_block(BasicBlockData {
            statements: vec![assign.clone()],
            terminator: Some(Terminator {
                kind: TerminatorKind::Goto { target: unwind },
                ..*terminator
            }),
            is_cleanup: true,
        });

        let target = self.patch.new_block(BasicBlockData {
            statements: vec![assign],
            terminator: Some(Terminator { kind: TerminatorKind::Goto { target }, ..*terminator }),
            is_cleanup: false,
        });

        match self.move_data().rev_lookup.find(place.as_ref()) {
            LookupResult::Exact(path) => {
                debug!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator, path);
                self.init_data.seek_before(loc);
                elaborate_drop(
                    &mut Elaborator { ctxt: self },
                    terminator.source_info,
                    place,
                    path,
                    target,
                    Unwind::To(unwind),
                    bb,
                );
                on_all_children_bits(self.tcx, self.body, self.move_data(), path, |child| {
                    self.set_drop_flag(
                        Location { block: target, statement_index: 0 },
                        child,
                        DropFlagState::Present,
                    );
                    self.set_drop_flag(
                        Location { block: unwind, statement_index: 0 },
                        child,
                        DropFlagState::Present,
                    );
                });
            }
            LookupResult::Parent(parent) => {
                // drop and replace behind a pointer/array/whatever. The location
                // must be initialized.
                debug!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator, parent);
                self.patch.patch_terminator(
                    bb,
                    TerminatorKind::Drop { place, target, unwind: Some(unwind) },
                );
            }
        }
    }

    fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
        Rvalue::Use(Operand::Constant(Box::new(Constant {
            span,
            user_ty: None,
            literal: ConstantKind::from_bool(self.tcx, val),
        })))
    }

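    /// Emits `FLAG = true` or `FLAG = false` at `loc` for the drop flag of
    /// `path`, if that path has a drop flag at all.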
    fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
        if let Some(&flag) = self.drop_flags.get(&path) {
            let span = self.patch.source_info_for_location(self.body, loc).span;
            let val = self.constant_bool(span, val.value());
            self.patch.add_assign(loc, Place::from(flag), val);
        }
    }

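    /// Initializes every drop flag to `false` at the start of the function.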
    fn drop_flags_on_init(&mut self) {
        let loc = Location::START;
        let span = self.patch.source_info_for_location(self.body, loc).span;
        let false_ = self.constant_bool(span, false);
        for flag in self.drop_flags.values() {
            self.patch.add_assign(loc, Place::from(*flag), false_.clone());
        }
    }

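    /// For calls that have a cleanup (unwind) edge, marks the call's return
    /// place as initialized at the start of the return block rather than
    /// before the call, since a flag set before the terminator would also be
    /// observed on the unwind path, where the destination is not written.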
    fn drop_flags_for_fn_rets(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            if let TerminatorKind::Call {
                destination, target: Some(tgt), cleanup: Some(_), ..
            } = data.terminator().kind
            {
                assert!(!self.patch.is_patched(bb));

                let loc = Location { block: tgt, statement_index: 0 };
                let path = self.move_data().rev_lookup.find(destination.as_ref());
                on_lookup_result_bits(self.tcx, self.body, self.move_data(), path, |child| {
                    self.set_drop_flag(loc, child, DropFlagState::Present)
                });
            }
        }
    }

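    /// Marks the move paths of the function's arguments as initialized on
    /// entry, via `drop_flag_effects_for_function_entry`.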
    fn drop_flags_for_args(&mut self) {
        let loc = Location::START;
        rustc_mir_dataflow::drop_flag_effects_for_function_entry(
            self.tcx,
            self.body,
            self.env,
            |path, ds| {
                self.set_drop_flag(loc, path, ds);
            },
        )
    }

    fn drop_flags_for_locs(&mut self) {
        // We intentionally iterate only over the *old* basic blocks.
        //
        // Basic blocks created by drop elaboration update their
        // drop flags by themselves, to avoid the drop flags being
        // clobbered before they are read.

        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            debug!("drop_flags_for_locs({:?})", data);
            for i in 0..(data.statements.len() + 1) {
                debug!("drop_flag_for_locs: stmt {}", i);
                let mut allow_initializations = true;
                if i == data.statements.len() {
                    match data.terminator().kind {
                        TerminatorKind::Drop { .. } => {
                            // drop elaboration should handle that by itself
                            continue;
                        }
                        TerminatorKind::DropAndReplace { .. } => {
                            // this contains the move of the source and
                            // the initialization of the destination. We
                            // only want the former - the latter is handled
                            // by the elaboration code and must be done
                            // *after* the destination is dropped.
                            assert!(self.patch.is_patched(bb));
                            allow_initializations = false;
                        }
                        TerminatorKind::Resume => {
                            // It is possible for `Resume` to be patched
                            // (in particular it can be patched to be replaced with
                            // a Goto; see `MirPatch::new`).
                        }
                        _ => {
                            assert!(!self.patch.is_patched(bb));
                        }
                    }
                }
                let loc = Location { block: bb, statement_index: i };
                rustc_mir_dataflow::drop_flag_effects_for_location(
                    self.tcx,
                    self.body,
                    self.env,
                    loc,
                    |path, ds| {
                        if ds == DropFlagState::Absent || allow_initializations {
                            self.set_drop_flag(loc, path, ds)
                        }
                    },
                )
            }

            // There may be a critical edge after this call,
            // so mark the return as initialized *before* the
            // call.
            if let TerminatorKind::Call { destination, target: Some(_), cleanup: None, .. } =
                data.terminator().kind
            {
                assert!(!self.patch.is_patched(bb));

                let loc = Location { block: bb, statement_index: data.statements.len() };
                let path = self.move_data().rev_lookup.find(destination.as_ref());
                on_lookup_result_bits(self.tcx, self.body, self.move_data(), path, |child| {
                    self.set_drop_flag(loc, child, DropFlagState::Present)
                });
            }
        }
    }
}