// compiler/rustc_mir_transform/src/lib.rs (upstream rustc 1.58.1)
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(crate_visibility_modifier)]
#![feature(in_band_lifetimes)]
#![feature(iter_zip)]
#![feature(let_else)]
#![feature(map_try_insert)]
#![feature(min_specialization)]
#![feature(option_get_or_insert_default)]
#![feature(once_cell)]
#![feature(never_type)]
#![feature(trusted_step)]
#![feature(try_blocks)]
#![recursion_limit = "256"]

#[macro_use]
extern crate tracing;
#[macro_use]
extern crate rustc_middle;

use required_consts::RequiredConstsVisitor;
use rustc_const_eval::util;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::steal::Steal;
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_index::vec::IndexVec;
use rustc_middle::mir::visit::Visitor as _;
use rustc_middle::mir::{dump_mir, traversal, Body, ConstQualifs, MirPhase, Promoted};
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, TyCtxt, TypeFoldable};
use rustc_span::{Span, Symbol};

mod abort_unwinding_calls;
mod add_call_guards;
mod add_moves_for_packed_drops;
mod add_retag;
mod check_const_item_mutation;
mod check_packed_ref;
pub mod check_unsafety;
mod cleanup_post_borrowck;
mod const_debuginfo;
mod const_goto;
mod const_prop;
mod coverage;
mod deaggregator;
mod deduplicate_blocks;
mod dest_prop;
pub mod dump_mir;
mod early_otherwise_branch;
mod elaborate_drops;
mod function_item_references;
mod generator;
mod inline;
mod instcombine;
mod lower_intrinsics;
mod lower_slice_len;
mod match_branches;
mod multiple_return_terminators;
mod normalize_array_len;
mod nrvo;
mod remove_noop_landing_pads;
mod remove_storage_markers;
mod remove_unneeded_drops;
mod remove_zsts;
mod required_consts;
mod reveal_all;
mod separate_const_switch;
mod shim;
mod simplify;
mod simplify_branches;
mod simplify_comparison_integral;
mod simplify_try;
mod uninhabited_enum_branching;
mod unreachable_prop;

use rustc_const_eval::transform::check_consts;
use rustc_const_eval::transform::promote_consts;
use rustc_const_eval::transform::validate;
pub use rustc_const_eval::transform::MirPass;
use rustc_mir_dataflow::rustc_peek;

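/// Registers this crate's MIR-transform queries (e.g. `mir_const`, `mir_promoted`,
/// `optimized_mir`) with the compiler's query system. A driver wires this up roughly
/// as follows (a minimal, illustrative sketch, not the exact `rustc_interface` code):
///
/// ```ignore (illustrative)
/// let mut providers = Providers::default();
/// rustc_mir_transform::provide(&mut providers);
/// ```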
pub fn provide(providers: &mut Providers) {
    check_unsafety::provide(providers);
    check_packed_ref::provide(providers);
    coverage::query::provide(providers);
    shim::provide(providers);
    *providers = Providers {
        mir_keys,
        mir_const,
        mir_const_qualif: |tcx, def_id| {
            let def_id = def_id.expect_local();
            if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
                tcx.mir_const_qualif_const_arg(def)
            } else {
                mir_const_qualif(tcx, ty::WithOptConstParam::unknown(def_id))
            }
        },
        mir_const_qualif_const_arg: |tcx, (did, param_did)| {
            mir_const_qualif(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
        },
        mir_promoted,
        mir_drops_elaborated_and_const_checked,
        mir_for_ctfe,
        mir_for_ctfe_of_const_arg,
        optimized_mir,
        is_mir_available,
        is_ctfe_mir_available: |tcx, did| is_mir_available(tcx, did),
        mir_callgraph_reachable: inline::cycle::mir_callgraph_reachable,
        mir_inliner_callees: inline::cycle::mir_inliner_callees,
        promoted_mir: |tcx, def_id| {
            let def_id = def_id.expect_local();
            if let Some(def) = ty::WithOptConstParam::try_lookup(def_id, tcx) {
                tcx.promoted_mir_of_const_arg(def)
            } else {
                promoted_mir(tcx, ty::WithOptConstParam::unknown(def_id))
            }
        },
        promoted_mir_of_const_arg: |tcx, (did, param_did)| {
            promoted_mir(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) })
        },
        ..*providers
    };
}

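/// Returns `true` if the given item (which must be local) has MIR associated with it,
/// i.e. if it is contained in the `mir_keys` set.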
fn is_mir_available(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
    let def_id = def_id.expect_local();
    tcx.mir_keys(()).contains(&def_id)
}

/// Finds the full set of `DefId`s within the current crate that have
/// MIR associated with them.
fn mir_keys(tcx: TyCtxt<'_>, (): ()) -> FxHashSet<LocalDefId> {
    let mut set = FxHashSet::default();

    // All body-owners have MIR associated with them.
    set.extend(tcx.hir().body_owners());

    // Additionally, tuple struct/variant constructors have MIR, but
    // they don't have a BodyId, so we need to build them separately.
    struct GatherCtors<'a, 'tcx> {
        tcx: TyCtxt<'tcx>,
        set: &'a mut FxHashSet<LocalDefId>,
    }
    impl<'a, 'tcx> Visitor<'tcx> for GatherCtors<'a, 'tcx> {
        fn visit_variant_data(
            &mut self,
            v: &'tcx hir::VariantData<'tcx>,
            _: Symbol,
            _: &'tcx hir::Generics<'tcx>,
            _: hir::HirId,
            _: Span,
        ) {
            if let hir::VariantData::Tuple(_, hir_id) = *v {
                self.set.insert(self.tcx.hir().local_def_id(hir_id));
            }
            intravisit::walk_struct_def(self, v)
        }
        type Map = intravisit::ErasedMap<'tcx>;
        fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
            NestedVisitorMap::None
        }
    }
    tcx.hir().visit_all_item_likes(&mut GatherCtors { tcx, set: &mut set }.as_deep_visitor());

    set
}

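/// Runs the given groups of MIR passes on `body`, in order, then advances the body to
/// `mir_phase`. Does nothing if the body is already at (or past) that phase. MIR is
/// dumped before and after every pass, and with `-Zvalidate-mir` the body is also
/// validated between passes. For example (as used by
/// `run_post_borrowck_cleanup_passes` below):
///
/// ```ignore (illustrative)
/// run_passes(tcx, body, MirPhase::DropLowering, &[post_borrowck_cleanup]);
/// ```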
fn run_passes(
    tcx: TyCtxt<'tcx>,
    body: &mut Body<'tcx>,
    mir_phase: MirPhase,
    passes: &[&[&dyn MirPass<'tcx>]],
) {
    let phase_index = mir_phase.phase_index();
    let validate = tcx.sess.opts.debugging_opts.validate_mir;

    if body.phase >= mir_phase {
        return;
    }

    if validate {
        validate::Validator { when: format!("input to phase {:?}", mir_phase), mir_phase }
            .run_pass(tcx, body);
    }

    let mut index = 0;
    let mut run_pass = |pass: &dyn MirPass<'tcx>| {
        let run_hooks = |body: &_, index, is_after| {
            let disambiguator = if is_after { "after" } else { "before" };
            dump_mir(
                tcx,
                Some(&format_args!("{:03}-{:03}", phase_index, index)),
                &pass.name(),
                &disambiguator,
                body,
                |_, _| Ok(()),
            );
        };
        run_hooks(body, index, false);
        pass.run_pass(tcx, body);
        run_hooks(body, index, true);

        if validate {
            validate::Validator {
                when: format!("after {} in phase {:?}", pass.name(), mir_phase),
                mir_phase,
            }
            .run_pass(tcx, body);
        }

        index += 1;
    };

    for pass_group in passes {
        for pass in *pass_group {
            run_pass(*pass);
        }
    }

    body.phase = mir_phase;

    if mir_phase == MirPhase::Optimization {
        validate::Validator { when: format!("end of phase {:?}", mir_phase), mir_phase }
            .run_pass(tcx, body);
    }
}

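/// Const-checks the body of `def` and returns the qualifications of its return place.
/// Returns the default (empty) qualifications for non-const bodies and for bodies whose
/// return type contains errors.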
fn mir_const_qualif(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> ConstQualifs {
    let const_kind = tcx.hir().body_const_context(def.did);

    // No need to const-check a non-const `fn`.
    if const_kind.is_none() {
        return Default::default();
    }

    // N.B., this `borrow()` is guaranteed to be valid (i.e., the value
    // cannot yet be stolen), because `mir_promoted()`, which steals
    // from `mir_const()`, forces this query to execute before
    // performing the steal.
    let body = &tcx.mir_const(def).borrow();

    if body.return_ty().references_error() {
        tcx.sess.delay_span_bug(body.span, "mir_const_qualif: MIR had errors");
        return Default::default();
    }

    let ccx = check_consts::ConstCx { body, tcx, const_kind, param_env: tcx.param_env(def.did) };

    let mut validator = check_consts::check::Checker::new(&ccx);
    validator.check_body();

    // We return the qualifs in the return place for every MIR body, even though it is only used
    // when deciding to promote a reference to a `const` for now.
    validator.qualifs_in_return_place()
}

/// Make MIR ready for const evaluation. This is run on all MIR, not just on consts!
fn mir_const<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx Steal<Body<'tcx>> {
    if let Some(def) = def.try_upgrade(tcx) {
        return tcx.mir_const(def);
    }

    // Unsafety check uses the raw mir, so make sure it is run.
    if !tcx.sess.opts.debugging_opts.thir_unsafeck {
        if let Some(param_did) = def.const_param_did {
            tcx.ensure().unsafety_check_result_for_const_arg((def.did, param_did));
        } else {
            tcx.ensure().unsafety_check_result(def.did);
        }
    }

    let mut body = tcx.mir_built(def).steal();

    rustc_middle::mir::dump_mir(tcx, None, "mir_map", &0, &body, |_, _| Ok(()));

    run_passes(
        tcx,
        &mut body,
        MirPhase::Const,
        &[&[
            // MIR-level lints.
            &check_packed_ref::CheckPackedRef,
            &check_const_item_mutation::CheckConstItemMutation,
            &function_item_references::FunctionItemReferences,
            // What we need in order to do constant evaluation.
            &simplify::SimplifyCfg::new("initial"),
            &rustc_peek::SanityCheck,
        ]],
    );
    tcx.alloc_steal_mir(body)
}

/// Compute the main MIR body and the list of MIR bodies of the promoteds.
fn mir_promoted(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> (&'tcx Steal<Body<'tcx>>, &'tcx Steal<IndexVec<Promoted, Body<'tcx>>>) {
    if let Some(def) = def.try_upgrade(tcx) {
        return tcx.mir_promoted(def);
    }

    // Ensure that we compute the `mir_const_qualif` for constants at
    // this point, before we steal the mir-const result.
    // Also this means promotion can rely on all const checks having been done.
    let _ = tcx.mir_const_qualif_opt_const_arg(def);
    let mut body = tcx.mir_const(def).steal();

    let mut required_consts = Vec::new();
    let mut required_consts_visitor = RequiredConstsVisitor::new(&mut required_consts);
    for (bb, bb_data) in traversal::reverse_postorder(&body) {
        required_consts_visitor.visit_basic_block_data(bb, bb_data);
    }
    body.required_consts = required_consts;

    let promote_pass = promote_consts::PromoteTemps::default();
    let promote: &[&dyn MirPass<'tcx>] = &[
        // What we need to run borrowck etc.
        &promote_pass,
        &simplify::SimplifyCfg::new("promote-consts"),
    ];

    let opt_coverage: &[&dyn MirPass<'tcx>] =
        if tcx.sess.instrument_coverage() { &[&coverage::InstrumentCoverage] } else { &[] };

    run_passes(tcx, &mut body, MirPhase::ConstPromotion, &[promote, opt_coverage]);

    let promoted = promote_pass.promoted_fragments.into_inner();
    (tcx.alloc_steal_mir(body), tcx.alloc_steal_promoted(promoted))
}

/// Compute the MIR that is used during CTFE (and thus has no optimizations run on it)
fn mir_for_ctfe<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx Body<'tcx> {
    let did = def_id.expect_local();
    if let Some(def) = ty::WithOptConstParam::try_lookup(did, tcx) {
        tcx.mir_for_ctfe_of_const_arg(def)
    } else {
        tcx.arena.alloc(inner_mir_for_ctfe(tcx, ty::WithOptConstParam::unknown(did)))
    }
}

/// Same as `mir_for_ctfe`, but used to get the MIR of a const generic parameter.
/// The docs on `WithOptConstParam` explain this a bit more, but the TLDR is that
/// we'd get cycle errors with `mir_for_ctfe`, because typeck would need to typeck
/// the const parameter while type checking the main body, which in turn would try
/// to type check the main body again.
fn mir_for_ctfe_of_const_arg<'tcx>(
    tcx: TyCtxt<'tcx>,
    (did, param_did): (LocalDefId, DefId),
) -> &'tcx Body<'tcx> {
    tcx.arena.alloc(inner_mir_for_ctfe(
        tcx,
        ty::WithOptConstParam { did, const_param_did: Some(param_did) },
    ))
}

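/// Shared implementation backing `mir_for_ctfe` and `mir_for_ctfe_of_const_arg`: clones
/// the drops-elaborated MIR and, depending on the const context, additionally runs
/// const propagation on it.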
fn inner_mir_for_ctfe(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> Body<'_> {
    // FIXME: don't duplicate this between the optimized_mir/mir_for_ctfe queries
    if tcx.is_constructor(def.did.to_def_id()) {
        // There's no reason to run all of the MIR passes on constructors when
        // we can just output the MIR we want directly. This also saves const
        // qualification and borrow checking the trouble of special casing
        // constructors.
        return shim::build_adt_ctor(tcx, def.did.to_def_id());
    }

    let context = tcx
        .hir()
        .body_const_context(def.did)
        .expect("mir_for_ctfe should not be used for runtime functions");

    let mut body = tcx.mir_drops_elaborated_and_const_checked(def).borrow().clone();

    match context {
        // Do not const-prop functions: either they get executed at runtime or exported to
        // metadata, in which case we run const prop on them, or they don't, in which case we
        // const evaluate some control flow paths of the function and any errors in those
        // paths will get emitted as const eval errors.
        hir::ConstContext::ConstFn => {}
        // Static items always get evaluated, so we can just let const eval see if any erroneous
        // control flow paths get executed.
        hir::ConstContext::Static(_) => {}
        // Associated constants get const prop run so we detect common failure situations in the
        // crate that defined the constant.
        // Technically we want to not run on regular const items, but oli-obk doesn't know how to
        // conveniently detect that at this point without looking at the HIR.
        hir::ConstContext::Const => {
            #[rustfmt::skip]
            let optimizations: &[&dyn MirPass<'_>] = &[
                &const_prop::ConstProp,
            ];

            #[rustfmt::skip]
            run_passes(
                tcx,
                &mut body,
                MirPhase::Optimization,
                &[
                    optimizations,
                ],
            );
        }
    }

    debug_assert!(!body.has_free_regions(tcx), "Free regions in MIR for CTFE");

    body
}

/// Obtain just the main MIR (no promoteds) and run some cleanups on it. This also runs
/// mir borrowck *before* doing so in order to ensure that borrowck can be run and doesn't
/// end up missing the source MIR because it has already been stolen.
fn mir_drops_elaborated_and_const_checked<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx Steal<Body<'tcx>> {
    if let Some(def) = def.try_upgrade(tcx) {
        return tcx.mir_drops_elaborated_and_const_checked(def);
    }

    // (Mir-)Borrowck uses `mir_promoted`, so we have to force it to
    // execute before we can steal.
    if let Some(param_did) = def.const_param_did {
        tcx.ensure().mir_borrowck_const_arg((def.did, param_did));
    } else {
        tcx.ensure().mir_borrowck(def.did);
    }

    let hir_id = tcx.hir().local_def_id_to_hir_id(def.did);
    let is_fn_like = tcx.hir().get(hir_id).fn_kind().is_some();
    if is_fn_like {
        let did = def.did.to_def_id();
        let def = ty::WithOptConstParam::unknown(did);

        // Do not compute the mir call graph without said call graph actually being used.
        if inline::is_enabled(tcx) {
            let _ = tcx.mir_inliner_callees(ty::InstanceDef::Item(def));
        }
    }

    let (body, _) = tcx.mir_promoted(def);
    let mut body = body.steal();

    run_post_borrowck_cleanup_passes(tcx, &mut body);
    check_consts::post_drop_elaboration::check_live_drops(tcx, &body);
    tcx.alloc_steal_mir(body)
}

/// After this series of passes, no lifetime analysis based on borrowing can be done.
fn run_post_borrowck_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    debug!("post_borrowck_cleanup({:?})", body.source.def_id());

    let post_borrowck_cleanup: &[&dyn MirPass<'tcx>] = &[
        // Remove all things only needed by analysis
        &simplify_branches::SimplifyBranches::new("initial"),
        &remove_noop_landing_pads::RemoveNoopLandingPads,
        &cleanup_post_borrowck::CleanupNonCodegenStatements,
        &simplify::SimplifyCfg::new("early-opt"),
        // These next passes must be executed together
        &add_call_guards::CriticalCallEdges,
        &elaborate_drops::ElaborateDrops,
        // This will remove extraneous landing pads which are no longer
        // necessary, as well as forcing any call in a non-unwinding
        // function calling a possibly-unwinding function to abort the process.
        &abort_unwinding_calls::AbortUnwindingCalls,
        // AddMovesForPackedDrops needs to run after drop
        // elaboration.
        &add_moves_for_packed_drops::AddMovesForPackedDrops,
        // `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late,
        // but before optimizations begin.
        &add_retag::AddRetag,
        &lower_intrinsics::LowerIntrinsics,
        &simplify::SimplifyCfg::new("elaborate-drops"),
        // `Deaggregator` is conceptually part of MIR building; some backends rely on it
        // happening, and it can help optimizations.
        &deaggregator::Deaggregator,
    ];

    run_passes(tcx, body, MirPhase::DropLowering, &[post_borrowck_cleanup]);
}

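/// Runs the MIR optimization pipeline: generator lowering, then the main optimizations,
/// then a final pre-codegen cleanup. Which passes run depends on `-Zmir-opt-level`; at
/// level 0 only generator lowering, `ConstProp` (kept for its lints, see the FIXME
/// below) and the pre-codegen cleanup are run.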
fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
    let mir_opt_level = tcx.sess.mir_opt_level();

    // Lowering generator control-flow and variables has to happen before we do anything else
    // to them. We run some optimizations before that, because they may be harder to do on the
    // state machine than on MIR with async primitives.
    let optimizations_with_generators: &[&dyn MirPass<'tcx>] = &[
        &reveal_all::RevealAll, // has to be done before inlining, since inlined code is in RevealAll mode.
        &lower_slice_len::LowerSliceLenCalls, // has to be done before inlining, otherwise the call will almost always be inlined. Also simple, so it can just run first.
        &normalize_array_len::NormalizeArrayLen, // has to run after `slice::len` lowering
        &unreachable_prop::UnreachablePropagation,
        &uninhabited_enum_branching::UninhabitedEnumBranching,
        &simplify::SimplifyCfg::new("after-uninhabited-enum-branching"),
        &inline::Inline,
        &generator::StateTransform,
    ];

    // Even if we don't do optimizations, we still have to lower generators for codegen.
    let no_optimizations_with_generators: &[&dyn MirPass<'tcx>] = &[&generator::StateTransform];

    // The main optimizations that we do on MIR.
    let optimizations: &[&dyn MirPass<'tcx>] = &[
        &remove_storage_markers::RemoveStorageMarkers,
        &remove_zsts::RemoveZsts,
        &const_goto::ConstGoto,
        &remove_unneeded_drops::RemoveUnneededDrops,
        &match_branches::MatchBranchSimplification,
        // InstCombine runs after MatchBranchSimplification to clean up Ne(_1, false)
        &multiple_return_terminators::MultipleReturnTerminators,
        &instcombine::InstCombine,
        &separate_const_switch::SeparateConstSwitch,
        &const_prop::ConstProp,
        &simplify_branches::SimplifyBranches::new("after-const-prop"),
        &early_otherwise_branch::EarlyOtherwiseBranch,
        &simplify_comparison_integral::SimplifyComparisonIntegral,
        &simplify_try::SimplifyArmIdentity,
        &simplify_try::SimplifyBranchSame,
        &dest_prop::DestinationPropagation,
        &simplify_branches::SimplifyBranches::new("final"),
        &remove_noop_landing_pads::RemoveNoopLandingPads,
        &simplify::SimplifyCfg::new("final"),
        &nrvo::RenameReturnPlace,
        &const_debuginfo::ConstDebugInfo,
        &simplify::SimplifyLocals,
        &multiple_return_terminators::MultipleReturnTerminators,
        &deduplicate_blocks::DeduplicateBlocks,
    ];

    // Optimizations to run even if mir optimizations have been disabled.
    let no_optimizations: &[&dyn MirPass<'tcx>] = &[
        // FIXME(#70073): This pass is responsible for both optimization as well as some lints.
        &const_prop::ConstProp,
    ];

    // Some cleanup necessary at least for LLVM and potentially other codegen backends.
    let pre_codegen_cleanup: &[&dyn MirPass<'tcx>] = &[
        &add_call_guards::CriticalCallEdges,
        // Dump the end result for testing and debugging purposes.
        &dump_mir::Marker("PreCodegen"),
    ];

    // End of pass declarations, now actually run the passes.
    // Generator Lowering
    #[rustfmt::skip]
    run_passes(
        tcx,
        body,
        MirPhase::GeneratorLowering,
        &[
            if mir_opt_level > 0 {
                optimizations_with_generators
            } else {
                no_optimizations_with_generators
            }
        ],
    );

    // Main optimization passes
    #[rustfmt::skip]
    run_passes(
        tcx,
        body,
        MirPhase::Optimization,
        &[
            if mir_opt_level > 0 { optimizations } else { no_optimizations },
            pre_codegen_cleanup,
        ],
    );
}

/// Optimize the MIR and prepare it for codegen.
fn optimized_mir<'tcx>(tcx: TyCtxt<'tcx>, did: DefId) -> &'tcx Body<'tcx> {
    let did = did.expect_local();
    assert_eq!(ty::WithOptConstParam::try_lookup(did, tcx), None);
    tcx.arena.alloc(inner_optimized_mir(tcx, did))
}

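/// Shared implementation backing `optimized_mir`: steals the drops-elaborated MIR and
/// runs the optimization pipeline on it. Constructors are special-cased and get their
/// MIR built directly by `shim::build_adt_ctor`.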
fn inner_optimized_mir(tcx: TyCtxt<'_>, did: LocalDefId) -> Body<'_> {
    if tcx.is_constructor(did.to_def_id()) {
        // There's no reason to run all of the MIR passes on constructors when
        // we can just output the MIR we want directly. This also saves const
        // qualification and borrow checking the trouble of special casing
        // constructors.
        return shim::build_adt_ctor(tcx, did.to_def_id());
    }

    match tcx.hir().body_const_context(did) {
        // Run the `mir_for_ctfe` query, which depends on `mir_drops_elaborated_and_const_checked`
        // which we are going to steal below. Thus we need to run `mir_for_ctfe` first, so it
        // computes and caches its result.
        Some(hir::ConstContext::ConstFn) => tcx.ensure().mir_for_ctfe(did),
        None => {}
        Some(other) => panic!("do not use `optimized_mir` for constants: {:?}", other),
    }
    let mut body =
        tcx.mir_drops_elaborated_and_const_checked(ty::WithOptConstParam::unknown(did)).steal();
    run_optimization_passes(tcx, &mut body);

    debug_assert!(!body.has_free_regions(tcx), "Free regions in optimized MIR");

    body
}

/// Fetch all the promoteds of an item and prepare their MIR bodies to be ready for
/// constant evaluation once all substitutions become known.
fn promoted_mir<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::WithOptConstParam<LocalDefId>,
) -> &'tcx IndexVec<Promoted, Body<'tcx>> {
    if tcx.is_constructor(def.did.to_def_id()) {
        return tcx.arena.alloc(IndexVec::new());
    }

    if let Some(param_did) = def.const_param_did {
        tcx.ensure().mir_borrowck_const_arg((def.did, param_did));
    } else {
        tcx.ensure().mir_borrowck(def.did);
    }
    let (_, promoted) = tcx.mir_promoted(def);
    let mut promoted = promoted.steal();

    for body in &mut promoted {
        run_post_borrowck_cleanup_passes(tcx, body);
    }

    debug_assert!(!promoted.has_free_regions(tcx), "Free regions in promoted MIR");

    tcx.arena.alloc(promoted)
}