// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Inlining pass for MIR functions

use rustc::hir;
use rustc::hir::CodegenFnAttrFlags;
use rustc::hir::def_id::DefId;

use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};

use rustc::mir::*;
use rustc::mir::visit::*;
use rustc::ty::{self, Instance, Ty, TyCtxt};
use rustc::ty::subst::{Subst, Substs};

use std::collections::VecDeque;
use std::iter;
use transform::{MirPass, MirSource};
use super::simplify::{remove_dead_blocks, CfgSimplifier};

use syntax::attr;
use rustc_target::spec::abi::Abi;

const DEFAULT_THRESHOLD: usize = 50;
const HINT_THRESHOLD: usize = 100;

const INSTR_COST: usize = 5;
const CALL_PENALTY: usize = 25;

const UNKNOWN_SIZE_COST: usize = 10;
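
// A rough sketch of how these constants combine (illustrative numbers, not
// from the original source): a callee with four ordinary statements and one
// non-intrinsic call terminator costs 4 * INSTR_COST + CALL_PENALTY =
// 4 * 5 + 25 = 45, which fits under DEFAULT_THRESHOLD (50) but would be
// rejected once a few more statements push it past the threshold.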
pub struct Inline;

#[derive(Copy, Clone, Debug)]
struct CallSite<'tcx> {
    callee: DefId,
    substs: &'tcx Substs<'tcx>,
    bb: BasicBlock,
    location: SourceInfo,
}

impl MirPass for Inline {
    fn run_pass<'a, 'tcx>(&self,
                          tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          source: MirSource,
                          mir: &mut Mir<'tcx>) {
        if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 {
            Inliner { tcx, source }.run_pass(mir);
        }
    }
}

struct Inliner<'a, 'tcx: 'a> {
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    source: MirSource,
}

impl<'a, 'tcx> Inliner<'a, 'tcx> {
    fn run_pass(&self, caller_mir: &mut Mir<'tcx>) {
        // Keep a queue of callsites to try inlining on. We take
        // advantage of the fact that queries detect cycles here to
        // allow us to try and fetch the fully optimized MIR of a
        // call; if it succeeds, we can inline it and we know that
        // they do not call us. Otherwise, we just don't try to
        // inline.
        //
        // We use a queue so that we inline "broadly" before we inline
        // in depth. It is unclear if this is the best heuristic,
        // really, but that's true of all the heuristics in this
        // file.
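        //
        // Illustrative example (not from the original source): if `a`
        // calls `b` and `b` calls `c`, the callsite for `b` is examined
        // first; only if `b` is actually inlined into `a` does the `c`
        // callsite inside the inlined copy get pushed onto the back of
        // the queue.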

        let mut callsites = VecDeque::new();

        let param_env = self.tcx.param_env(self.source.def_id);

        // Only do inlining into fn bodies.
        let id = self.tcx.hir.as_local_node_id(self.source.def_id).unwrap();
        let body_owner_kind = self.tcx.hir.body_owner_kind(id);
        if let (hir::BodyOwnerKind::Fn, None) = (body_owner_kind, self.source.promoted) {
            for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated() {
                // Don't inline calls that are in cleanup blocks.
                if bb_data.is_cleanup { continue; }

                // Only consider direct calls to functions
                let terminator = bb_data.terminator();
                if let TerminatorKind::Call {
                    func: Operand::Constant(ref f), .. } = terminator.kind {
                    if let ty::TyFnDef(callee_def_id, substs) = f.ty.sty {
                        if let Some(instance) = Instance::resolve(self.tcx,
                                                                  param_env,
                                                                  callee_def_id,
                                                                  substs) {
                            callsites.push_back(CallSite {
                                callee: instance.def_id(),
                                substs: instance.substs,
                                bb,
                                location: terminator.source_info
                            });
                        }
                    }
                }
            }
        } else {
            return;
        }

        let mut local_change;
        let mut changed = false;

        loop {
            local_change = false;
            while let Some(callsite) = callsites.pop_front() {
                debug!("checking whether to inline callsite {:?}", callsite);
                if !self.tcx.is_mir_available(callsite.callee) {
                    debug!("checking whether to inline callsite {:?} - MIR unavailable",
                           callsite);
                    continue;
                }

                let callee_mir = match self.tcx.try_optimized_mir(callsite.location.span,
                                                                  callsite.callee) {
                    Ok(callee_mir) if self.should_inline(callsite, callee_mir) => {
                        self.tcx.subst_and_normalize_erasing_regions(
                            &callsite.substs,
                            param_env,
                            callee_mir,
                        )
                    }
                    Ok(_) => continue,

                    Err(mut bug) => {
                        // FIXME(#43542) shouldn't have to cancel an error
                        bug.cancel();
                        continue
                    }
                };

                let start = caller_mir.basic_blocks().len();
                debug!("attempting to inline callsite {:?} - mir={:?}", callsite, callee_mir);
                if !self.inline_call(callsite, caller_mir, callee_mir) {
                    debug!("attempting to inline callsite {:?} - failure", callsite);
                    continue;
                }
                debug!("attempting to inline callsite {:?} - success", callsite);

                // Add callsites from inlined function
                for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated().skip(start) {
                    // Only consider direct calls to functions
                    let terminator = bb_data.terminator();
                    if let TerminatorKind::Call {
                        func: Operand::Constant(ref f), .. } = terminator.kind {
                        if let ty::TyFnDef(callee_def_id, substs) = f.ty.sty {
                            // Don't inline the same function multiple times.
                            if callsite.callee != callee_def_id {
                                callsites.push_back(CallSite {
                                    callee: callee_def_id,
                                    substs,
                                    bb,
                                    location: terminator.source_info
                                });
                            }
                        }
                    }
                }

                local_change = true;
                changed = true;
            }

            if !local_change {
                break;
            }
        }

        // Simplify if we inlined anything.
        if changed {
            debug!("Running simplify cfg on {:?}", self.source);
            CfgSimplifier::new(caller_mir).simplify();
            remove_dead_blocks(caller_mir);
        }
    }
    fn should_inline(&self,
                     callsite: CallSite<'tcx>,
                     callee_mir: &Mir<'tcx>)
                     -> bool
    {
        debug!("should_inline({:?})", callsite);
        let tcx = self.tcx;

        // Don't inline closures that have captures
        // FIXME: Handle closures better
        if callee_mir.upvar_decls.len() > 0 {
            debug!("    upvar decls present - not inlining");
            return false;
        }

        // Cannot inline generators which haven't been transformed yet
        if callee_mir.yield_ty.is_some() {
            debug!("    yield ty present - not inlining");
            return false;
        }

        // Do not inline {u,i}128 lang items, codegen const eval depends
        // on detecting calls to these lang items and intercepting them
        if tcx.is_binop_lang_item(callsite.callee).is_some() {
            debug!("    not inlining 128bit integer lang item");
            return false;
        }

        let codegen_fn_attrs = tcx.codegen_fn_attrs(callsite.callee);

        let hinted = match codegen_fn_attrs.inline {
            // Just treat inline(always) as a hint for now,
            // there are cases that prevent inlining that we
            // need to check for first.
            attr::InlineAttr::Always => true,
            attr::InlineAttr::Never => {
                debug!("#[inline(never)] present - not inlining");
                return false
            }
            attr::InlineAttr::Hint => true,
            attr::InlineAttr::None => false,
        };

        // Only inline local functions if they would be eligible for cross-crate
        // inlining. This is to ensure that the final crate doesn't have MIR that
        // references unexported symbols.
        if callsite.callee.is_local() {
            if callsite.substs.types().count() == 0 && !hinted {
                debug!("    callee is an exported function - not inlining");
                return false;
            }
        }

        let mut threshold = if hinted {
            HINT_THRESHOLD
        } else {
            DEFAULT_THRESHOLD
        };

        // Significantly lower the threshold for inlining cold functions
        if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
            threshold /= 5;
        }

        // Give a bonus to functions with a small number of blocks;
        // we normally have two or three blocks for even
        // very small functions.
        if callee_mir.basic_blocks().len() <= 3 {
            threshold += threshold / 4;
        }
        debug!("    final inline threshold = {}", threshold);
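
        // Worked example (illustrative, not from the original source): an
        // #[inline]-hinted callee starts at HINT_THRESHOLD = 100; if it
        // also has at most 3 basic blocks, the bonus raises the threshold
        // to 100 + 100 / 4 = 125. A cold, unhinted callee instead starts
        // at DEFAULT_THRESHOLD = 50 and is cut to 50 / 5 = 10.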
        // FIXME: Give a bonus to functions with only a single caller

        let param_env = tcx.param_env(self.source.def_id);

        let mut first_block = true;
        let mut cost = 0;

        // Traverse the MIR manually so we can account for the effects of
        // inlining on the CFG.
        let mut work_list = vec![START_BLOCK];
        let mut visited = BitVector::new(callee_mir.basic_blocks().len());
        while let Some(bb) = work_list.pop() {
            if !visited.insert(bb.index()) { continue; }
            let blk = &callee_mir.basic_blocks()[bb];

            for stmt in &blk.statements {
                // Don't count StorageLive/StorageDead in the inlining cost.
                match stmt.kind {
                    StatementKind::StorageLive(_) |
                    StatementKind::StorageDead(_) |
                    StatementKind::Nop => {}
                    _ => cost += INSTR_COST
                }
            }
            let term = blk.terminator();
            let mut is_drop = false;
            match term.kind {
                TerminatorKind::Drop { ref location, target, unwind } |
                TerminatorKind::DropAndReplace { ref location, target, unwind, .. } => {
                    is_drop = true;
                    work_list.push(target);
                    // If the location doesn't actually need dropping, treat it like
                    // a regular goto.
                    let ty = location.ty(callee_mir, tcx).subst(tcx, callsite.substs);
                    let ty = ty.to_ty(tcx);
                    if ty.needs_drop(tcx, param_env) {
                        cost += CALL_PENALTY;
                        if let Some(unwind) = unwind {
                            work_list.push(unwind);
                        }
                    } else {
                        cost += INSTR_COST;
                    }
                }

                TerminatorKind::Unreachable |
                TerminatorKind::Call { destination: None, .. } if first_block => {
                    // If the function always diverges, don't inline
                    // unless the cost is zero
                    threshold = 0;
                }

                TerminatorKind::Call { func: Operand::Constant(ref f), .. } => {
                    if let ty::TyFnDef(def_id, _) = f.ty.sty {
                        // Don't give intrinsics the extra penalty for calls
                        let f = tcx.fn_sig(def_id);
                        if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
                            cost += INSTR_COST;
                        } else {
                            cost += CALL_PENALTY;
                        }
                    }
                }
                TerminatorKind::Assert { .. } => cost += CALL_PENALTY,
                _ => cost += INSTR_COST
            }

            if !is_drop {
                for &succ in term.successors() {
                    work_list.push(succ);
                }
            }

            first_block = false;
        }

        // Count up the cost of local variables and temps, if we know the size
        // use that, otherwise we use a moderately-large dummy cost.

        let ptr_size = tcx.data_layout.pointer_size.bytes();

        for v in callee_mir.vars_and_temps_iter() {
            let v = &callee_mir.local_decls[v];
            let ty = v.ty.subst(tcx, callsite.substs);
            // Cost of the var is the size in machine-words, if we know
            // the size.
            if let Some(size) = type_size_of(tcx, param_env.clone(), ty) {
                cost += (size / ptr_size) as usize;
            } else {
                cost += UNKNOWN_SIZE_COST;
            }
        }
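
        // Illustrative arithmetic (not from the original source): on a
        // 64-bit target `ptr_size` is 8, so a 32-byte local adds
        // 32 / 8 = 4 to the cost, while a local whose layout cannot be
        // computed adds the flat UNKNOWN_SIZE_COST of 10.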

        if let attr::InlineAttr::Always = codegen_fn_attrs.inline {
            debug!("INLINING {:?} because inline(always) [cost={}]", callsite, cost);
            true
        } else {
            if cost <= threshold {
                debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold);
                true
            } else {
                debug!("NOT inlining {:?} [cost={} > threshold={}]", callsite, cost, threshold);
                false
            }
        }
    }

    fn inline_call(&self,
                   callsite: CallSite<'tcx>,
                   caller_mir: &mut Mir<'tcx>,
                   mut callee_mir: Mir<'tcx>) -> bool {
        let terminator = caller_mir[callsite.bb].terminator.take().unwrap();
        match terminator.kind {
            // FIXME: Handle inlining of diverging calls
            TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
                debug!("Inlined {:?} into {:?}", callsite.callee, self.source);

                let mut local_map = IndexVec::with_capacity(callee_mir.local_decls.len());
                let mut scope_map = IndexVec::with_capacity(callee_mir.source_scopes.len());
                let mut promoted_map = IndexVec::with_capacity(callee_mir.promoted.len());

                for mut scope in callee_mir.source_scopes.iter().cloned() {
                    if scope.parent_scope.is_none() {
                        scope.parent_scope = Some(callsite.location.scope);
                        scope.span = callee_mir.span;
                    } else {
                        scope.span = callsite.location.span;
                    }

                    let idx = caller_mir.source_scopes.push(scope);
                    scope_map.push(idx);
                }

                for loc in callee_mir.vars_and_temps_iter() {
                    let mut local = callee_mir.local_decls[loc].clone();

                    local.source_info.scope =
                        scope_map[local.source_info.scope];
                    local.source_info.span = callsite.location.span;
                    local.visibility_scope = scope_map[local.visibility_scope];

                    let idx = caller_mir.local_decls.push(local);
                    local_map.push(idx);
                }

                for p in callee_mir.promoted.iter().cloned() {
                    let idx = caller_mir.promoted.push(p);
                    promoted_map.push(idx);
                }

                // If the call is something like `a[*i] = f(i)`, where
                // `i : &mut usize`, then just duplicating the `a[*i]`
                // Place could result in two different locations if `f`
                // writes to `i`. To prevent this we need to create a temporary
                // borrow of the place and pass the destination as `*temp` instead.
                fn dest_needs_borrow(place: &Place) -> bool {
                    match *place {
                        Place::Projection(ref p) => {
                            match p.elem {
                                ProjectionElem::Deref |
                                ProjectionElem::Index(_) => true,
                                _ => dest_needs_borrow(&p.base)
                            }
                        }
                        // Static variables need a borrow because the callee
                        // might modify the same static.
                        Place::Static(_) => true,
                        _ => false
                    }
                }

                let dest = if dest_needs_borrow(&destination.0) {
                    debug!("Creating temp for return destination");
                    let dest = Rvalue::Ref(
                        self.tcx.types.re_erased,
                        BorrowKind::Mut { allow_two_phase_borrow: false },
                        destination.0);

                    let ty = dest.ty(caller_mir, self.tcx);

                    let temp = LocalDecl::new_temp(ty, callsite.location.span);

                    let tmp = caller_mir.local_decls.push(temp);
                    let tmp = Place::Local(tmp);

                    let stmt = Statement {
                        source_info: callsite.location,
                        kind: StatementKind::Assign(tmp.clone(), dest)
                    };
                    caller_mir[callsite.bb]
                        .statements.push(stmt);
                    tmp.deref()
                } else {
                    destination.0
                };

                let return_block = destination.1;

                // Copy the arguments if needed.
                let args: Vec<_> = self.make_call_args(args, &callsite, caller_mir);

                let bb_len = caller_mir.basic_blocks().len();
                let mut integrator = Integrator {
                    block_idx: bb_len,
                    args: &args,
                    local_map,
                    scope_map,
                    promoted_map,
                    _callsite: callsite,
                    destination: dest,
                    return_block,
                    cleanup_block: cleanup,
                    in_cleanup_block: false
                };

                for (bb, mut block) in callee_mir.basic_blocks_mut().drain_enumerated(..) {
                    integrator.visit_basic_block_data(bb, &mut block);
                    caller_mir.basic_blocks_mut().push(block);
                }

                let terminator = Terminator {
                    source_info: callsite.location,
                    kind: TerminatorKind::Goto { target: BasicBlock::new(bb_len) }
                };

                caller_mir[callsite.bb].terminator = Some(terminator);

                true
            }
            kind => {
                caller_mir[callsite.bb].terminator = Some(Terminator {
                    source_info: terminator.source_info,
                    kind,
                });
                false
            }
        }
    }

    fn make_call_args(
        &self,
        args: Vec<Operand<'tcx>>,
        callsite: &CallSite<'tcx>,
        caller_mir: &mut Mir<'tcx>,
    ) -> Vec<Local> {
        let tcx = self.tcx;

        // There is a bit of a mismatch between the *caller* of a closure and the *callee*.
        // The caller provides the arguments wrapped up in a tuple:
        //
        //     tuple_tmp = (a, b, c)
        //     Fn::call(closure_ref, tuple_tmp)
        //
        // meanwhile the closure body expects the arguments (here, `a`, `b`, and `c`)
        // as distinct arguments. (This is the "rust-call" ABI hack.) Normally, codegen has
        // the job of unpacking this tuple. But here, we are codegen. =) So we want to create
        // a vector like
        //
        //     [closure_ref, tuple_tmp.0, tuple_tmp.1, tuple_tmp.2]
        //
        // Except for one tiny wrinkle: we don't actually want `tuple_tmp.0`. It's more
        // convenient if we "spill" that into *another* temporary, so that we can map the
        // argument variable in the callee MIR directly to an argument variable on our side.
        // So we introduce temporaries like:
        //
        //     tmp0 = tuple_tmp.0
        //     tmp1 = tuple_tmp.1
        //     tmp2 = tuple_tmp.2
        //
        // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
        if tcx.is_closure(callsite.callee) {
            let mut args = args.into_iter();
            let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_mir);
            let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_mir);
            assert!(args.next().is_none());

            let tuple = Place::Local(tuple);
            let tuple_tys = if let ty::TyTuple(s) = tuple.ty(caller_mir, tcx).to_ty(tcx).sty {
                s
            } else {
                bug!("Closure arguments are not passed as a tuple");
            };

            // The `closure_ref` in our example above.
            let closure_ref_arg = iter::once(self_);

            // The `tmp0`, `tmp1`, and `tmp2` in our example above.
            let tuple_tmp_args =
                tuple_tys.iter().enumerate().map(|(i, ty)| {
                    // This is e.g. `tuple_tmp.0` in our example above.
                    let tuple_field = Operand::Move(tuple.clone().field(Field::new(i), ty));

                    // Spill to a local to make e.g. `tmp0`.
                    self.create_temp_if_necessary(tuple_field, callsite, caller_mir)
                });

            closure_ref_arg.chain(tuple_tmp_args).collect()
        } else {
            args.into_iter()
                .map(|a| self.create_temp_if_necessary(a, callsite, caller_mir))
                .collect()
        }
    }
    /// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh
    /// temporary `T` and an instruction `T = arg`, and returns `T`.
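    ///
    /// Illustrative example (not from the original source): for a call
    /// `f(x, x + 1)`, MIR building has already evaluated `x + 1` into a
    /// temporary, so that operand is reused as-is, while the user variable
    /// `x` is spilled through a fresh `tmp = x` statement so the callee's
    /// argument local can be mapped directly onto `tmp`.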
    fn create_temp_if_necessary(
        &self,
        arg: Operand<'tcx>,
        callsite: &CallSite<'tcx>,
        caller_mir: &mut Mir<'tcx>,
    ) -> Local {
        // FIXME: Analysis of the usage of the arguments to avoid
        // unnecessary temporaries.

        if let Operand::Move(Place::Local(local)) = arg {
            if caller_mir.local_kind(local) == LocalKind::Temp {
                // Reuse the operand if it's a temporary already
                return local;
            }
        }

        debug!("Creating temp for argument {:?}", arg);
        // Otherwise, create a temporary for the arg
        let arg = Rvalue::Use(arg);

        let ty = arg.ty(caller_mir, self.tcx);

        let arg_tmp = LocalDecl::new_temp(ty, callsite.location.span);
        let arg_tmp = caller_mir.local_decls.push(arg_tmp);

        let stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(Place::Local(arg_tmp), arg),
        };
        caller_mir[callsite.bb].statements.push(stmt);
        arg_tmp
    }
}

fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          param_env: ty::ParamEnv<'tcx>,
                          ty: Ty<'tcx>) -> Option<u64> {
    tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes())
}

/**
 * Integrates blocks from the callee function into the calling function.
 * Updates block indices, references to locals and other control flow
 * stuff.
 */
struct Integrator<'a, 'tcx: 'a> {
    block_idx: usize,
    args: &'a [Local],
    local_map: IndexVec<Local, Local>,
    scope_map: IndexVec<SourceScope, SourceScope>,
    promoted_map: IndexVec<Promoted, Promoted>,
    _callsite: CallSite<'tcx>,
    destination: Place<'tcx>,
    return_block: BasicBlock,
    cleanup_block: Option<BasicBlock>,
    in_cleanup_block: bool,
}

impl<'a, 'tcx> Integrator<'a, 'tcx> {
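    /// Illustrative example (not from the original source): if the caller
    /// had 10 basic blocks before inlining (`block_idx` = 10), the callee's
    /// `bb3` is renumbered to `bb13` once its blocks are appended to the
    /// caller.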
    fn update_target(&self, tgt: BasicBlock) -> BasicBlock {
        let new = BasicBlock::new(tgt.index() + self.block_idx);
        debug!("Updating target `{:?}`, new: `{:?}`", tgt, new);
        new
    }
}

impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
    fn visit_local(&mut self,
                   local: &mut Local,
                   _ctxt: PlaceContext<'tcx>,
                   _location: Location) {
        if *local == RETURN_PLACE {
            match self.destination {
                Place::Local(l) => {
                    *local = l;
                    return;
                },
                ref place => bug!("Return place is {:?}, not local", place)
            }
        }
        let idx = local.index() - 1;
        if idx < self.args.len() {
            *local = self.args[idx];
            return;
        }
        *local = self.local_map[Local::new(idx - self.args.len())];
    }

    fn visit_place(&mut self,
                   place: &mut Place<'tcx>,
                   _ctxt: PlaceContext<'tcx>,
                   _location: Location) {
        if let Place::Local(RETURN_PLACE) = *place {
            // Return pointer; update the place itself
            *place = self.destination.clone();
        } else {
            self.super_place(place, _ctxt, _location);
        }
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        self.in_cleanup_block = data.is_cleanup;
        self.super_basic_block_data(block, data);
        self.in_cleanup_block = false;
    }

    fn visit_terminator_kind(&mut self, block: BasicBlock,
                             kind: &mut TerminatorKind<'tcx>, loc: Location) {
        self.super_terminator_kind(block, kind, loc);

        match *kind {
            TerminatorKind::GeneratorDrop |
            TerminatorKind::Yield { .. } => bug!(),
            TerminatorKind::Goto { ref mut target } => {
                *target = self.update_target(*target);
            }
            TerminatorKind::SwitchInt { ref mut targets, .. } => {
                for tgt in targets {
                    *tgt = self.update_target(*tgt);
                }
            }
            TerminatorKind::Drop { ref mut target, ref mut unwind, .. } |
            TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *unwind {
                    *unwind = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this drop is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *unwind = self.cleanup_block;
                }
            }
            TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
                if let Some((_, ref mut tgt)) = *destination {
                    *tgt = self.update_target(*tgt);
                }
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this call is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this assert is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Return => {
                *kind = TerminatorKind::Goto { target: self.return_block };
            }
            TerminatorKind::Resume => {
                if let Some(tgt) = self.cleanup_block {
                    *kind = TerminatorKind::Goto { target: tgt }
                }
            }
            TerminatorKind::Abort => { }
            TerminatorKind::Unreachable => { }
            TerminatorKind::FalseEdges { ref mut real_target, ref mut imaginary_targets } => {
                *real_target = self.update_target(*real_target);
                for target in imaginary_targets {
                    *target = self.update_target(*target);
                }
            }
            TerminatorKind::FalseUnwind { real_target: _, unwind: _ } =>
                // see the ordering of passes in the optimized_mir query.
                bug!("False unwinds should have been removed before inlining")
        }
    }

    fn visit_source_scope(&mut self, scope: &mut SourceScope) {
        *scope = self.scope_map[*scope];
    }

    fn visit_literal(&mut self, literal: &mut Literal<'tcx>, loc: Location) {
        if let Literal::Promoted { ref mut index } = *literal {
            if let Some(p) = self.promoted_map.get(*index).cloned() {
                *index = p;
            }
        } else {
            self.super_literal(literal, loc);
        }
    }
}