// compiler/rustc_mir/src/transform/mod.rs (rustc 1.52.0-beta.3)
use crate::{shim, util};
use required_consts::RequiredConstsVisitor;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::steal::Steal;
use rustc_hir as hir;
use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LOCAL_CRATE};
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_index::vec::IndexVec;
use rustc_middle::mir::visit::Visitor as _;
use rustc_middle::mir::{traversal, Body, ConstQualifs, MirPhase, Promoted};
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, TyCtxt, TypeFoldable};
use rustc_span::{Span, Symbol};
use std::borrow::Cow;

pub mod add_call_guards;
pub mod add_moves_for_packed_drops;
pub mod add_retag;
pub mod check_const_item_mutation;
pub mod check_consts;
pub mod check_packed_ref;
pub mod check_unsafety;
pub mod cleanup_post_borrowck;
pub mod const_debuginfo;
pub mod const_goto;
pub mod const_prop;
pub mod coverage;
pub mod deaggregator;
pub mod deduplicate_blocks;
pub mod dest_prop;
pub mod dump_mir;
pub mod early_otherwise_branch;
pub mod elaborate_drops;
pub mod function_item_references;
pub mod generator;
pub mod inline;
pub mod instcombine;
pub mod lower_intrinsics;
pub mod match_branches;
pub mod multiple_return_terminators;
pub mod no_landing_pads;
pub mod nrvo;
pub mod promote_consts;
pub mod remove_noop_landing_pads;
pub mod remove_storage_markers;
pub mod remove_unneeded_drops;
pub mod required_consts;
pub mod rustc_peek;
pub mod simplify;
pub mod simplify_branches;
pub mod simplify_comparison_integral;
pub mod simplify_try;
pub mod uninhabited_enum_branching;
pub mod unreachable_prop;
pub mod validate;

pub use rustc_middle::mir::MirSource;

pub(crate) fn provide(providers: &mut Providers) {
    self::check_unsafety::provide(providers);
    *providers = Providers {
        mir_keys,
        mir_const,
        mir_const_qualif: |tcx, def_id| {
            let def_id = def_id.expect_local();
            if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
                tcx.mir_const_qualif_const_arg(def)
            } else {
                mir_const_qualif(tcx, ty::WithOptConstParam::unknown(def_id))
            }
        },
        mir_const_qualif_const_arg: |tcx, (did, param_did)| {
            mir_const_qualif(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
        },
        mir_promoted,
        mir_drops_elaborated_and_const_checked,
        mir_for_ctfe,
        mir_for_ctfe_of_const_arg,
        optimized_mir,
        is_mir_available,
        is_ctfe_mir_available: |tcx, did| is_mir_available(tcx, did),
        promoted_mir: |tcx, def_id| {
            let def_id = def_id.expect_local();
            if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
                tcx.promoted_mir_of_const_arg(def)
            } else {
                promoted_mir(tcx, ty::WithOptConstParam::unknown(def_id))
            }
        },
        promoted_mir_of_const_arg: |tcx, (did, param_did)| {
            promoted_mir(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
        },
        ..*providers
    };
    coverage::query::provide(providers);
}

fn is_mir_available(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
    tcx.mir_keys(def_id.krate).contains(&def_id.expect_local())
}

/// Finds the full set of `DefId`s within the current crate that have
/// MIR associated with them.
fn mir_keys(tcx: TyCtxt<'_>, krate: CrateNum) -> FxHashSet<LocalDefId> {
    assert_eq!(krate, LOCAL_CRATE);

    let mut set = FxHashSet::default();

    // All body-owners have MIR associated with them.
    set.extend(tcx.body_owners());

    // Additionally, tuple struct/variant constructors have MIR, but
    // they don't have a BodyId, so we need to build them separately.
    struct GatherCtors<'a, 'tcx> {
        tcx: TyCtxt<'tcx>,
        set: &'a mut FxHashSet<LocalDefId>,
    }
    impl<'a, 'tcx> Visitor<'tcx> for GatherCtors<'a, 'tcx> {
        fn visit_variant_data(
            &mut self,
            v: &'tcx hir::VariantData<'tcx>,
            _: Symbol,
            _: &'tcx hir::Generics<'tcx>,
            _: hir::HirId,
            _: Span,
        ) {
            if let hir::VariantData::Tuple(_, hir_id) = *v {
                self.set.insert(self.tcx.hir().local_def_id(hir_id));
            }
            intravisit::walk_struct_def(self, v)
        }
        type Map = intravisit::ErasedMap<'tcx>;
        fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
            NestedVisitorMap::None
        }
    }
    tcx.hir()
        .krate()
        .visit_all_item_likes(&mut GatherCtors { tcx, set: &mut set }.as_deep_visitor());

    set
}

/// Generates a default name for the pass based on the name of the
/// type `T`.
pub fn default_name<T: ?Sized>() -> Cow<'static, str> {
    let name = std::any::type_name::<T>();
    if let Some(tail) = name.rfind(':') { Cow::from(&name[tail + 1..]) } else { Cow::from(name) }
}
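
// For example (illustrative only): `default_name::<simplify::SimplifyLocals>()` yields
// "SimplifyLocals", i.e. the final path segment of the fully qualified type name.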

/// A streamlined trait that you can implement to create a pass; the
/// pass will be named after the type, and it will consist of a main
/// loop that goes over each available MIR and applies `run_pass`.
pub trait MirPass<'tcx> {
    fn name(&self) -> Cow<'_, str> {
        default_name::<Self>()
    }

    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>);
}
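
// A minimal sketch of a custom pass (hypothetical `NoopPass`, not registered in any pass
// list below): only `run_pass` must be provided, while `name` falls back to the type name
// via `default_name`.
//
//     pub struct NoopPass;
//
//     impl<'tcx> MirPass<'tcx> for NoopPass {
//         fn run_pass(&self, _tcx: TyCtxt<'tcx>, _body: &mut Body<'tcx>) {
//             // A real pass would inspect or mutate `_body` here.
//         }
//     }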

pub fn run_passes(
    tcx: TyCtxt<'tcx>,
    body: &mut Body<'tcx>,
    mir_phase: MirPhase,
    passes: &[&[&dyn MirPass<'tcx>]],
) {
    let phase_index = mir_phase.phase_index();
    let validate = tcx.sess.opts.debugging_opts.validate_mir;

    if body.phase >= mir_phase {
        return;
    }

    if validate {
        validate::Validator { when: format!("input to phase {:?}", mir_phase), mir_phase }
            .run_pass(tcx, body);
    }

    let mut index = 0;
    let mut run_pass = |pass: &dyn MirPass<'tcx>| {
        let run_hooks = |body: &_, index, is_after| {
            dump_mir::on_mir_pass(
                tcx,
                &format_args!("{:03}-{:03}", phase_index, index),
                &pass.name(),
                body,
                is_after,
            );
        };
        run_hooks(body, index, false);
        pass.run_pass(tcx, body);
        run_hooks(body, index, true);

        if validate {
            validate::Validator {
                when: format!("after {} in phase {:?}", pass.name(), mir_phase),
                mir_phase,
            }
            .run_pass(tcx, body);
        }

        index += 1;
    };

    for pass_group in passes {
        for pass in *pass_group {
            run_pass(*pass);
        }
    }

    body.phase = mir_phase;

    if mir_phase == MirPhase::Optimization {
        validate::Validator { when: format!("end of phase {:?}", mir_phase), mir_phase }
            .run_pass(tcx, body);
    }
}
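
// Taken together, the queries below push a body through the phases in order: `mir_const`
// runs the `MirPhase::Const` passes, `mir_promoted` the `ConstPromotion` passes,
// `run_post_borrowck_cleanup_passes` the `DropLowering` passes, and
// `run_optimization_passes` the `GeneratorLowering` and `Optimization` passes. Because
// `run_passes` returns early when `body.phase >= mir_phase`, a body never repeats a phase.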

fn mir_const_qualif(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> ConstQualifs {
    let const_kind = tcx.hir().body_const_context(def.did);

    // No need to const-check a non-const `fn`.
    if const_kind.is_none() {
        return Default::default();
    }

    // N.B., this `borrow()` is guaranteed to be valid (i.e., the value
    // cannot yet be stolen), because `mir_promoted()`, which steals
    // from `mir_const()`, forces this query to execute before
    // performing the steal.
    let body = &tcx.mir_const(def).borrow();

    if body.return_ty().references_error() {
        tcx.sess.delay_span_bug(body.span, "mir_const_qualif: MIR had errors");
        return Default::default();
    }

    let ccx = check_consts::ConstCx { body, tcx, const_kind, param_env: tcx.param_env(def.did) };

    let mut validator = check_consts::validation::Validator::new(&ccx);
    validator.check_body();

    // We return the qualifs in the return place for every MIR body, even though it is only used
    // when deciding to promote a reference to a `const` for now.
    validator.qualifs_in_return_place()
}

/// Make MIR ready for const evaluation. This is run on all MIR, not just on consts!
fn mir_const<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx Steal<Body<'tcx>> {
    if let Some(def) = def.try_upgrade(tcx) {
        return tcx.mir_const(def);
    }

    // The unsafety check uses the raw MIR, so make sure it is run.
    if let Some(param_did) = def.const_param_did {
        tcx.ensure().unsafety_check_result_for_const_arg((def.did, param_did));
    } else {
        tcx.ensure().unsafety_check_result(def.did);
    }

    let mut body = tcx.mir_built(def).steal();

    util::dump_mir(tcx, None, "mir_map", &0, &body, |_, _| Ok(()));

    run_passes(
        tcx,
        &mut body,
        MirPhase::Const,
        &[&[
            // MIR-level lints.
            &check_packed_ref::CheckPackedRef,
            &check_const_item_mutation::CheckConstItemMutation,
            &function_item_references::FunctionItemReferences,
            // What we need in order to do constant evaluation.
            &simplify::SimplifyCfg::new("initial"),
            &rustc_peek::SanityCheck,
        ]],
    );
    tcx.alloc_steal_mir(body)
}

/// Compute the main MIR body and the list of MIR bodies of the promoteds.
fn mir_promoted(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> (&'tcx Steal<Body<'tcx>>, &'tcx Steal<IndexVec<Promoted, Body<'tcx>>>) {
    if let Some(def) = def.try_upgrade(tcx) {
        return tcx.mir_promoted(def);
    }

    // Ensure that we compute the `mir_const_qualif` for constants at
    // this point, before we steal the mir-const result.
    // This also means promotion can rely on all const checks having been done.
    let _ = tcx.mir_const_qualif_opt_const_arg(def);
    let _ = tcx.mir_abstract_const_opt_const_arg(def.to_global());
    let mut body = tcx.mir_const(def).steal();

    let mut required_consts = Vec::new();
    let mut required_consts_visitor = RequiredConstsVisitor::new(&mut required_consts);
    for (bb, bb_data) in traversal::reverse_postorder(&body) {
        required_consts_visitor.visit_basic_block_data(bb, bb_data);
    }
    body.required_consts = required_consts;

    let promote_pass = promote_consts::PromoteTemps::default();
    let promote: &[&dyn MirPass<'tcx>] = &[
        // What we need in order to run borrowck etc.
        &promote_pass,
        &simplify::SimplifyCfg::new("promote-consts"),
    ];

    let opt_coverage: &[&dyn MirPass<'tcx>] = if tcx.sess.opts.debugging_opts.instrument_coverage {
        &[&coverage::InstrumentCoverage]
    } else {
        &[]
    };

    run_passes(tcx, &mut body, MirPhase::ConstPromotion, &[promote, opt_coverage]);

    let promoted = promote_pass.promoted_fragments.into_inner();
    (tcx.alloc_steal_mir(body), tcx.alloc_steal_promoted(promoted))
}

/// Compute the MIR that is used during CTFE (and thus has no optimizations run on it).
fn mir_for_ctfe<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx Body<'tcx> {
    let did = def_id.expect_local();
    if let Some(def) = ty::WithOptConstParam::try_lookup(did, tcx) {
        tcx.mir_for_ctfe_of_const_arg(def)
    } else {
        tcx.arena.alloc(inner_mir_for_ctfe(tcx, ty::WithOptConstParam::unknown(did)))
    }
}

/// Same as `mir_for_ctfe`, but used to get the MIR of a const generic parameter.
/// The docs on `WithOptConstParam` explain this a bit more, but the TLDR is that
/// we'd get cycle errors with `mir_for_ctfe`, because typeck would need to typeck
/// the const parameter while type checking the main body, which in turn would try
/// to type check the main body again.
fn mir_for_ctfe_of_const_arg<'tcx>(
    tcx: TyCtxt<'tcx>,
    (did, param_did): (LocalDefId, DefId),
) -> &'tcx Body<'tcx> {
    tcx.arena.alloc(inner_mir_for_ctfe(
        tcx,
        ty::WithOptConstParam { did, const_param_did: Some(param_did) },
    ))
}

fn inner_mir_for_ctfe(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> Body<'_> {
    // FIXME: don't duplicate this between the optimized_mir/mir_for_ctfe queries
    if tcx.is_constructor(def.did.to_def_id()) {
        // There's no reason to run all of the MIR passes on constructors when
        // we can just output the MIR we want directly. This also saves const
        // qualification and borrow checking the trouble of special casing
        // constructors.
        return shim::build_adt_ctor(tcx, def.did.to_def_id());
    }

    let context = tcx
        .hir()
        .body_const_context(def.did)
        .expect("mir_for_ctfe should not be used for runtime functions");

    let mut body = tcx.mir_drops_elaborated_and_const_checked(def).borrow().clone();

    match context {
        // Do not const-prop functions: either they get executed at runtime or exported to
        // metadata, in which case const prop runs on them anyway, or they don't, in which
        // case we const-evaluate some control-flow paths of the function and any errors in
        // those paths will get emitted as const-eval errors.
        hir::ConstContext::ConstFn => {}
        // Static items always get evaluated, so we can just let const eval see if any
        // erroneous control-flow paths get executed.
        hir::ConstContext::Static(_) => {}
        // Associated constants get const prop run so we detect common failure situations in
        // the crate that defined the constant.
        // Technically we'd prefer not to run this on regular const items, but oli-obk doesn't
        // know how to conveniently detect that at this point without looking at the HIR.
        hir::ConstContext::Const => {
            #[rustfmt::skip]
            let optimizations: &[&dyn MirPass<'_>] = &[
                &const_prop::ConstProp,
            ];

            #[rustfmt::skip]
            run_passes(
                tcx,
                &mut body,
                MirPhase::Optimization,
                &[
                    optimizations,
                ],
            );
        }
    }

    debug_assert!(!body.has_free_regions(), "Free regions in MIR for CTFE");

    body
}

/// Obtain just the main MIR (no promoteds) and run some cleanups on it. This also runs
/// mir borrowck *before* doing so in order to ensure that borrowck can be run and doesn't
/// end up missing the source MIR because it has already been stolen.
fn mir_drops_elaborated_and_const_checked<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx Steal<Body<'tcx>> {
    if let Some(def) = def.try_upgrade(tcx) {
        return tcx.mir_drops_elaborated_and_const_checked(def);
    }

    // (Mir-)Borrowck uses `mir_promoted`, so we have to force it to
    // execute before we can steal.
    if let Some(param_did) = def.const_param_did {
        tcx.ensure().mir_borrowck_const_arg((def.did, param_did));
    } else {
        tcx.ensure().mir_borrowck(def.did);
    }

    let hir_id = tcx.hir().local_def_id_to_hir_id(def.did);
    use rustc_middle::hir::map::blocks::FnLikeNode;
    let is_fn_like = FnLikeNode::from_node(tcx.hir().get(hir_id)).is_some();
    if is_fn_like {
        let did = def.did.to_def_id();
        let def = ty::WithOptConstParam::unknown(did);

        // Do not compute the MIR call graph unless said call graph is actually going to be used.
        if inline::is_enabled(tcx) {
            let _ = tcx.mir_inliner_callees(ty::InstanceDef::Item(def));
        }
    }

    let (body, _) = tcx.mir_promoted(def);
    let mut body = body.steal();

    run_post_borrowck_cleanup_passes(tcx, &mut body);
    check_consts::post_drop_elaboration::check_live_drops(tcx, &body);
    tcx.alloc_steal_mir(body)
}

/// After this series of passes, no lifetime analysis based on borrowing can be done.
fn run_post_borrowck_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    debug!("post_borrowck_cleanup({:?})", body.source.def_id());

    let post_borrowck_cleanup: &[&dyn MirPass<'tcx>] = &[
        // Remove all things only needed by analysis
        &no_landing_pads::NoLandingPads,
        &simplify_branches::SimplifyBranches::new("initial"),
        &remove_noop_landing_pads::RemoveNoopLandingPads,
        &cleanup_post_borrowck::CleanupNonCodegenStatements,
        &simplify::SimplifyCfg::new("early-opt"),
        // These next passes must be executed together
        &add_call_guards::CriticalCallEdges,
        &elaborate_drops::ElaborateDrops,
        &no_landing_pads::NoLandingPads,
        // AddMovesForPackedDrops needs to run after drop
        // elaboration.
        &add_moves_for_packed_drops::AddMovesForPackedDrops,
        // `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late,
        // but before optimizations begin.
        &add_retag::AddRetag,
        &lower_intrinsics::LowerIntrinsics,
        &simplify::SimplifyCfg::new("elaborate-drops"),
        // `Deaggregator` is conceptually part of MIR building, some backends rely on it happening
        // and it can help optimizations.
        &deaggregator::Deaggregator,
    ];

    run_passes(tcx, body, MirPhase::DropLowering, &[post_borrowck_cleanup]);
}

fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let mir_opt_level = tcx.sess.mir_opt_level();

    // Lowering generator control-flow and variables has to happen before we do anything else
    // to them. We run some optimizations before that, because they may be harder to do on the
    // state machine than on MIR with async primitives.
    let optimizations_with_generators: &[&dyn MirPass<'tcx>] = &[
        &unreachable_prop::UnreachablePropagation,
        &uninhabited_enum_branching::UninhabitedEnumBranching,
        &simplify::SimplifyCfg::new("after-uninhabited-enum-branching"),
        &inline::Inline,
        &generator::StateTransform,
    ];

    // Even if we don't do optimizations, we still have to lower generators for codegen.
    let no_optimizations_with_generators: &[&dyn MirPass<'tcx>] = &[&generator::StateTransform];

    // The main optimizations that we do on MIR.
    let optimizations: &[&dyn MirPass<'tcx>] = &[
        &remove_storage_markers::RemoveStorageMarkers,
        &const_goto::ConstGoto,
        &remove_unneeded_drops::RemoveUnneededDrops,
        &match_branches::MatchBranchSimplification,
        // inst combine is after MatchBranchSimplification to clean up Ne(_1, false)
        &multiple_return_terminators::MultipleReturnTerminators,
        &instcombine::InstCombine,
        &const_prop::ConstProp,
        &simplify_branches::SimplifyBranches::new("after-const-prop"),
        &early_otherwise_branch::EarlyOtherwiseBranch,
        &simplify_comparison_integral::SimplifyComparisonIntegral,
        &simplify_try::SimplifyArmIdentity,
        &simplify_try::SimplifyBranchSame,
        &dest_prop::DestinationPropagation,
        &simplify_branches::SimplifyBranches::new("final"),
        &remove_noop_landing_pads::RemoveNoopLandingPads,
        &simplify::SimplifyCfg::new("final"),
        &nrvo::RenameReturnPlace,
        &const_debuginfo::ConstDebugInfo,
        &simplify::SimplifyLocals,
        &multiple_return_terminators::MultipleReturnTerminators,
        &deduplicate_blocks::DeduplicateBlocks,
    ];

    // Optimizations to run even if mir optimizations have been disabled.
    let no_optimizations: &[&dyn MirPass<'tcx>] = &[
        // FIXME(#70073): This pass is responsible for both optimization as well as some lints.
        &const_prop::ConstProp,
    ];

    // Some cleanup necessary at least for LLVM and potentially other codegen backends.
    let pre_codegen_cleanup: &[&dyn MirPass<'tcx>] = &[
        &add_call_guards::CriticalCallEdges,
        // Dump the end result for testing and debugging purposes.
        &dump_mir::Marker("PreCodegen"),
    ];

    // End of pass declarations, now actually run the passes.
    // Generator Lowering
    #[rustfmt::skip]
    run_passes(
        tcx,
        body,
        MirPhase::GeneratorLowering,
        &[
            if mir_opt_level > 0 {
                optimizations_with_generators
            } else {
                no_optimizations_with_generators
            }
        ],
    );

    // Main optimization passes
    #[rustfmt::skip]
    run_passes(
        tcx,
        body,
        MirPhase::Optimization,
        &[
            if mir_opt_level > 0 { optimizations } else { no_optimizations },
            pre_codegen_cleanup,
        ],
    );
}

/// Optimize the MIR and prepare it for codegen.
fn optimized_mir<'tcx>(tcx: TyCtxt<'tcx>, did: DefId) -> &'tcx Body<'tcx> {
    let did = did.expect_local();
    assert_eq!(ty::WithOptConstParam::try_lookup(did, tcx), None);
    tcx.arena.alloc(inner_optimized_mir(tcx, did))
}

fn inner_optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> Body<'_> {
    if tcx.is_constructor(did.to_def_id()) {
        // There's no reason to run all of the MIR passes on constructors when
        // we can just output the MIR we want directly. This also saves const
        // qualification and borrow checking the trouble of special casing
        // constructors.
        return shim::build_adt_ctor(tcx, did.to_def_id());
    }

    match tcx.hir().body_const_context(did) {
        // Run the `mir_for_ctfe` query, which depends on `mir_drops_elaborated_and_const_checked`
        // which we are going to steal below. Thus we need to run `mir_for_ctfe` first, so it
        // computes and caches its result.
        Some(hir::ConstContext::ConstFn) => tcx.ensure().mir_for_ctfe(did),
        None => {}
        Some(other) => panic!("do not use `optimized_mir` for constants: {:?}", other),
    }
    let mut body =
        tcx.mir_drops_elaborated_and_const_checked(ty::WithOptConstParam::unknown(did)).steal();
    run_optimization_passes(tcx, &mut body);

    debug_assert!(!body.has_free_regions(), "Free regions in optimized MIR");

    body
}

/// Fetch all the promoteds of an item and prepare their MIR bodies to be ready for
/// constant evaluation once all substitutions become known.
fn promoted_mir<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx IndexVec<Promoted, Body<'tcx>> {
    if tcx.is_constructor(def.did.to_def_id()) {
        return tcx.arena.alloc(IndexVec::new());
    }

    if let Some(param_did) = def.const_param_did {
        tcx.ensure().mir_borrowck_const_arg((def.did, param_did));
    } else {
        tcx.ensure().mir_borrowck(def.did);
    }
    let (_, promoted) = tcx.mir_promoted(def);
    let mut promoted = promoted.steal();

    for body in &mut promoted {
        run_post_borrowck_cleanup_passes(tcx, body);
    }

    debug_assert!(!promoted.has_free_regions(), "Free regions in promoted MIR");

    tcx.arena.alloc(promoted)
}