// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Inlining pass for MIR functions
use rustc::hir;
use rustc::hir::TransFnAttrFlags;
use rustc::hir::def_id::DefId;

use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};

use rustc::mir::*;
use rustc::mir::visit::*;
use rustc::ty::{self, Instance, Ty, TyCtxt};
use rustc::ty::subst::{Subst, Substs};

use std::collections::VecDeque;
use std::iter;
use transform::{MirPass, MirSource};
use super::simplify::{remove_dead_blocks, CfgSimplifier};

use syntax::attr;
use syntax::abi::Abi;
const DEFAULT_THRESHOLD: usize = 50;
const HINT_THRESHOLD: usize = 100;

const INSTR_COST: usize = 5;
const CALL_PENALTY: usize = 25;

const UNKNOWN_SIZE_COST: usize = 10;
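
// For intuition (worked example, derived from the constants above): a plain
// statement costs INSTR_COST = 5 and a call costs CALL_PENALTY = 25, so an
// un-hinted callee (DEFAULT_THRESHOLD = 50) has room for roughly ten simple
// statements, or one nested call plus five statements, before `should_inline`
// rejects it.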

pub struct Inline;

#[derive(Copy, Clone, Debug)]
struct CallSite<'tcx> {
    callee: DefId,
    substs: &'tcx Substs<'tcx>,
    bb: BasicBlock,
    location: SourceInfo,
}

impl MirPass for Inline {
    fn run_pass<'a, 'tcx>(&self,
                          tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          source: MirSource,
                          mir: &mut Mir<'tcx>) {
        if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 {
            Inliner { tcx, source }.run_pass(mir);
        }
    }
}

struct Inliner<'a, 'tcx: 'a> {
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    source: MirSource,
}

impl<'a, 'tcx> Inliner<'a, 'tcx> {
    fn run_pass(&self, caller_mir: &mut Mir<'tcx>) {
        // Keep a queue of callsites to try inlining on. We take
        // advantage of the fact that queries detect cycles here to
        // allow us to try and fetch the fully optimized MIR of a
        // call; if it succeeds, we can inline it and we know that
        // they do not call us. Otherwise, we just don't try to
        // inline.
        //
        // We use a queue so that we inline "broadly" before we inline
        // in depth. It is unclear if this is the best heuristic,
        // really, but that's true of all the heuristics in this
        // file.
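        //
        // For example (illustration): if `f` calls `g` and `h`, and `g` in
        // turn calls `k`, the FIFO queue considers `g` and `h` before `k`,
        // so shallow callsites get the first claim on the inlining budget.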

        let mut callsites = VecDeque::new();

        let param_env = self.tcx.param_env(self.source.def_id);

        // Only do inlining into fn bodies.
        let id = self.tcx.hir.as_local_node_id(self.source.def_id).unwrap();
        let body_owner_kind = self.tcx.hir.body_owner_kind(id);
        if let (hir::BodyOwnerKind::Fn, None) = (body_owner_kind, self.source.promoted) {
            for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated() {
                // Don't inline calls that are in cleanup blocks.
                if bb_data.is_cleanup { continue; }

                // Only consider direct calls to functions
                let terminator = bb_data.terminator();
                if let TerminatorKind::Call {
                    func: Operand::Constant(ref f), .. } = terminator.kind {
                    if let ty::TyFnDef(callee_def_id, substs) = f.ty.sty {
                        if let Some(instance) = Instance::resolve(self.tcx,
                                                                  param_env,
                                                                  callee_def_id,
                                                                  substs) {
                            callsites.push_back(CallSite {
                                callee: instance.def_id(),
                                substs: instance.substs,
                                bb,
                                location: terminator.source_info
                            });
                        }
                    }
                }
            }
        } else {
            return;
        }

        let mut local_change;
        let mut changed = false;

        loop {
            local_change = false;
            while let Some(callsite) = callsites.pop_front() {
                debug!("checking whether to inline callsite {:?}", callsite);
                if !self.tcx.is_mir_available(callsite.callee) {
                    debug!("checking whether to inline callsite {:?} - MIR unavailable",
                           callsite);
                    continue;
                }

                let callee_mir = match ty::queries::optimized_mir::try_get(self.tcx,
                                                                           callsite.location.span,
                                                                           callsite.callee) {
                    Ok(callee_mir) if self.should_inline(callsite, callee_mir) => {
                        self.tcx.subst_and_normalize_erasing_regions(
                            &callsite.substs,
                            param_env,
                            callee_mir,
                        )
                    }
                    Ok(_) => continue,

                    Err(mut bug) => {
                        // FIXME(#43542) shouldn't have to cancel an error
                        bug.cancel();
                        continue
                    }
                };

                let start = caller_mir.basic_blocks().len();
                debug!("attempting to inline callsite {:?} - mir={:?}", callsite, callee_mir);
                if !self.inline_call(callsite, caller_mir, callee_mir) {
                    debug!("attempting to inline callsite {:?} - failure", callsite);
                    continue;
                }
                debug!("attempting to inline callsite {:?} - success", callsite);

                // Add callsites from inlined function
                for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated().skip(start) {
                    // Only consider direct calls to functions
                    let terminator = bb_data.terminator();
                    if let TerminatorKind::Call {
                        func: Operand::Constant(ref f), .. } = terminator.kind {
                        if let ty::TyFnDef(callee_def_id, substs) = f.ty.sty {
                            // Don't inline the same function multiple times.
                            if callsite.callee != callee_def_id {
                                callsites.push_back(CallSite {
                                    callee: callee_def_id,
                                    substs,
                                    bb,
                                    location: terminator.source_info
                                });
                            }
                        }
                    }
                }

                local_change = true;
                changed = true;
            }

            if !local_change {
                break;
            }
        }

        // Simplify if we inlined anything.
        if changed {
            debug!("Running simplify cfg on {:?}", self.source);
            CfgSimplifier::new(caller_mir).simplify();
            remove_dead_blocks(caller_mir);
        }
    }

    fn should_inline(&self,
                     callsite: CallSite<'tcx>,
                     callee_mir: &Mir<'tcx>)
                     -> bool
    {
        debug!("should_inline({:?})", callsite);
        let tcx = self.tcx;

        // Don't inline closures that have captures
        // FIXME: Handle closures better
        if callee_mir.upvar_decls.len() > 0 {
            debug!("    upvar decls present - not inlining");
            return false;
        }

        // Cannot inline generators which haven't been transformed yet
        if callee_mir.yield_ty.is_some() {
            debug!("    yield ty present - not inlining");
            return false;
        }

        // Do not inline {u,i}128 lang items, trans const eval depends
        // on detecting calls to these lang items and intercepting them
        if tcx.is_binop_lang_item(callsite.callee).is_some() {
            debug!("    not inlining 128bit integer lang item");
            return false;
        }

        let trans_fn_attrs = tcx.trans_fn_attrs(callsite.callee);

        let hinted = match trans_fn_attrs.inline {
            // Just treat inline(always) as a hint for now,
            // there are cases that prevent inlining that we
            // need to check for first.
            attr::InlineAttr::Always => true,
            attr::InlineAttr::Never => {
                debug!("#[inline(never)] present - not inlining");
                return false;
            }
            attr::InlineAttr::Hint => true,
            attr::InlineAttr::None => false,
        };

        // Only inline local functions if they would be eligible for cross-crate
        // inlining. This is to ensure that the final crate doesn't have MIR that
        // references unexported symbols
        if callsite.callee.is_local() {
            if callsite.substs.types().count() == 0 && !hinted {
                debug!("    callee is an exported function - not inlining");
                return false;
            }
        }

        let mut threshold = if hinted {
            HINT_THRESHOLD
        } else {
            DEFAULT_THRESHOLD
        };

        // Significantly lower the threshold for inlining cold functions
        if trans_fn_attrs.flags.contains(TransFnAttrFlags::COLD) {
            threshold /= 5;
        }

        // Give a bonus to functions with a small number of blocks;
        // we normally have two or three blocks for even
        // very small functions.
        if callee_mir.basic_blocks().len() <= 3 {
            threshold += threshold / 4;
        }
        debug!("    final inline threshold = {}", threshold);

        // FIXME: Give a bonus to functions with only a single caller

        let param_env = tcx.param_env(self.source.def_id);

        let mut first_block = true;
        let mut cost = 0;

        // Traverse the MIR manually so we can account for the effects of
        // inlining on the CFG.
        let mut work_list = vec![START_BLOCK];
        let mut visited = BitVector::new(callee_mir.basic_blocks().len());
        while let Some(bb) = work_list.pop() {
            if !visited.insert(bb.index()) { continue; }
            let blk = &callee_mir.basic_blocks()[bb];

            for stmt in &blk.statements {
                // Don't count StorageLive/StorageDead in the inlining cost.
                match stmt.kind {
                    StatementKind::StorageLive(_) |
                    StatementKind::StorageDead(_) |
                    StatementKind::Nop => {}
                    _ => cost += INSTR_COST
                }
            }
            let term = blk.terminator();
            let mut is_drop = false;
            match term.kind {
                TerminatorKind::Drop { ref location, target, unwind } |
                TerminatorKind::DropAndReplace { ref location, target, unwind, .. } => {
                    is_drop = true;
                    work_list.push(target);
                    // If the location doesn't actually need dropping, treat it like
                    // a regular goto.
                    let ty = location.ty(callee_mir, tcx).subst(tcx, callsite.substs);
                    let ty = ty.to_ty(tcx);
                    if ty.needs_drop(tcx, param_env) {
                        cost += CALL_PENALTY;
                        if let Some(unwind) = unwind {
                            work_list.push(unwind);
                        }
                    } else {
                        cost += INSTR_COST;
                    }
                }

                TerminatorKind::Unreachable |
                TerminatorKind::Call { destination: None, .. } if first_block => {
                    // If the function always diverges, don't inline
                    // unless the cost is zero
                    threshold = 0;
                }

                TerminatorKind::Call { func: Operand::Constant(ref f), .. } => {
                    if let ty::TyFnDef(def_id, _) = f.ty.sty {
                        // Don't give intrinsics the extra penalty for calls
                        let f = tcx.fn_sig(def_id);
                        if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
                            cost += INSTR_COST;
                        } else {
                            cost += CALL_PENALTY;
                        }
                    }
                }
                TerminatorKind::Assert { .. } => cost += CALL_PENALTY,
                _ => cost += INSTR_COST
            }

            if !is_drop {
                for &succ in &term.successors()[..] {
                    work_list.push(succ);
                }
            }

            first_block = false;
        }

        // Count up the cost of local variables and temps, if we know the size
        // use that, otherwise we use a moderately-large dummy cost.

        let ptr_size = tcx.data_layout.pointer_size.bytes();

        for v in callee_mir.vars_and_temps_iter() {
            let v = &callee_mir.local_decls[v];
            let ty = v.ty.subst(tcx, callsite.substs);
            // Cost of the var is the size in machine-words, if we know
            // the size.
            if let Some(size) = type_size_of(tcx, param_env.clone(), ty) {
                cost += (size / ptr_size) as usize;
            } else {
                cost += UNKNOWN_SIZE_COST;
            }
        }
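
        // E.g. (illustration): a `[u64; 4]` temporary on a 64-bit target adds
        // 32 / 8 = 4 to the cost, while a local whose layout is unknown at
        // this point adds the flat UNKNOWN_SIZE_COST of 10.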

        if let attr::InlineAttr::Always = trans_fn_attrs.inline {
            debug!("INLINING {:?} because inline(always) [cost={}]", callsite, cost);
            true
        } else if cost <= threshold {
            debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold);
            true
        } else {
            debug!("NOT inlining {:?} [cost={} > threshold={}]", callsite, cost, threshold);
            false
        }
    }

    fn inline_call(&self,
                   callsite: CallSite<'tcx>,
                   caller_mir: &mut Mir<'tcx>,
                   mut callee_mir: Mir<'tcx>) -> bool {
        let terminator = caller_mir[callsite.bb].terminator.take().unwrap();
        match terminator.kind {
            // FIXME: Handle inlining of diverging calls
            TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
                debug!("Inlined {:?} into {:?}", callsite.callee, self.source);

                let is_box_free = Some(callsite.callee) == self.tcx.lang_items().box_free_fn();

                let mut local_map = IndexVec::with_capacity(callee_mir.local_decls.len());
                let mut scope_map = IndexVec::with_capacity(callee_mir.visibility_scopes.len());
                let mut promoted_map = IndexVec::with_capacity(callee_mir.promoted.len());

                for mut scope in callee_mir.visibility_scopes.iter().cloned() {
                    if scope.parent_scope.is_none() {
                        scope.parent_scope = Some(callsite.location.scope);
                        scope.span = callee_mir.span;
                    }

                    scope.span = callsite.location.span;

                    let idx = caller_mir.visibility_scopes.push(scope);
                    scope_map.push(idx);
                }

                for loc in callee_mir.vars_and_temps_iter() {
                    let mut local = callee_mir.local_decls[loc].clone();

                    local.source_info.scope = scope_map[local.source_info.scope];
                    local.source_info.span = callsite.location.span;

                    let idx = caller_mir.local_decls.push(local);
                    local_map.push(idx);
                }

                for p in callee_mir.promoted.iter().cloned() {
                    let idx = caller_mir.promoted.push(p);
                    promoted_map.push(idx);
                }

                // If the call is something like `a[*i] = f(i)`, where
                // `i : &mut usize`, then just duplicating the `a[*i]`
                // Place could result in two different locations if `f`
                // writes to `i`. To prevent this we need to create a temporary
                // borrow of the place and pass the destination as `*temp` instead.
                fn dest_needs_borrow(place: &Place) -> bool {
                    match *place {
                        Place::Projection(ref p) => {
                            match p.elem {
                                ProjectionElem::Deref |
                                ProjectionElem::Index(_) => true,
                                _ => dest_needs_borrow(&p.base)
                            }
                        }
                        // Static variables need a borrow because the callee
                        // might modify the same static.
                        Place::Static(_) => true,
                        _ => false
                    }
                }

                let dest = if dest_needs_borrow(&destination.0) {
                    debug!("Creating temp for return destination");
                    let dest = Rvalue::Ref(
                        self.tcx.types.re_erased,
                        BorrowKind::Mut { allow_two_phase_borrow: false },
                        destination.0);

                    let ty = dest.ty(caller_mir, self.tcx);

                    let temp = LocalDecl::new_temp(ty, callsite.location.span);

                    let tmp = caller_mir.local_decls.push(temp);
                    let tmp = Place::Local(tmp);

                    let stmt = Statement {
                        source_info: callsite.location,
                        kind: StatementKind::Assign(tmp.clone(), dest)
                    };
                    caller_mir[callsite.bb]
                        .statements.push(stmt);
                    tmp.deref()
                } else {
                    destination.0
                };

                let return_block = destination.1;

                let args: Vec<_> = if is_box_free {
                    assert!(args.len() == 1);
                    // box_free takes a Box, but is defined with a *mut T, inlining
                    // needs to generate the cast.
                    // FIXME: we should probably just generate correct MIR in the first place...
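
                    // (Illustration) `cast_box_free_arg` below emits, in the
                    // caller, roughly:
                    //     ref_tmp = &mut *arg;          // reborrow the Box
                    //     cast_tmp = ref_tmp as *mut T; // Misc cast
                    // and passes `cast_tmp` as the single argument.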

                    let arg = if let Operand::Move(ref place) = args[0] {
                        place.clone()
                    } else {
                        bug!("Constant arg to \"box_free\"");
                    };

                    let ptr_ty = args[0].ty(caller_mir, self.tcx);
                    vec![self.cast_box_free_arg(arg, ptr_ty, &callsite, caller_mir)]
                } else {
                    // Copy the arguments if needed.
                    self.make_call_args(args, &callsite, caller_mir)
                };

                let bb_len = caller_mir.basic_blocks().len();
                let mut integrator = Integrator {
                    block_idx: bb_len,
                    args: &args,
                    local_map,
                    scope_map,
                    promoted_map,
                    _callsite: callsite,
                    destination: dest,
                    return_block,
                    cleanup_block: cleanup,
                    in_cleanup_block: false
                };

                for (bb, mut block) in callee_mir.basic_blocks_mut().drain_enumerated(..) {
                    integrator.visit_basic_block_data(bb, &mut block);
                    caller_mir.basic_blocks_mut().push(block);
                }

                let terminator = Terminator {
                    source_info: callsite.location,
                    kind: TerminatorKind::Goto { target: BasicBlock::new(bb_len) }
                };

                caller_mir[callsite.bb].terminator = Some(terminator);

                true
            }
            kind => {
                caller_mir[callsite.bb].terminator = Some(Terminator {
                    source_info: terminator.source_info,
                    kind,
                });
                false
            }
        }
    }

    fn cast_box_free_arg(&self, arg: Place<'tcx>, ptr_ty: Ty<'tcx>,
                         callsite: &CallSite<'tcx>, caller_mir: &mut Mir<'tcx>) -> Local {
        let arg = Rvalue::Ref(
            self.tcx.types.re_erased,
            BorrowKind::Mut { allow_two_phase_borrow: false },
            arg.deref());

        let ty = arg.ty(caller_mir, self.tcx);
        let ref_tmp = LocalDecl::new_temp(ty, callsite.location.span);
        let ref_tmp = caller_mir.local_decls.push(ref_tmp);
        let ref_tmp = Place::Local(ref_tmp);

        let ref_stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(ref_tmp.clone(), arg)
        };

        caller_mir[callsite.bb]
            .statements.push(ref_stmt);

        let pointee_ty = match ptr_ty.sty {
            ty::TyRawPtr(tm) | ty::TyRef(_, tm) => tm.ty,
            _ if ptr_ty.is_box() => ptr_ty.boxed_ty(),
            _ => bug!("Invalid type `{:?}` for call to box_free", ptr_ty)
        };
        let ptr_ty = self.tcx.mk_mut_ptr(pointee_ty);

        let raw_ptr = Rvalue::Cast(CastKind::Misc, Operand::Move(ref_tmp), ptr_ty);

        let cast_tmp = LocalDecl::new_temp(ptr_ty, callsite.location.span);
        let cast_tmp = caller_mir.local_decls.push(cast_tmp);

        let cast_stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(Place::Local(cast_tmp), raw_ptr)
        };

        caller_mir[callsite.bb]
            .statements.push(cast_stmt);

        cast_tmp
    }

    fn make_call_args(
        &self,
        args: Vec<Operand<'tcx>>,
        callsite: &CallSite<'tcx>,
        caller_mir: &mut Mir<'tcx>,
    ) -> Vec<Local> {
        let tcx = self.tcx;

        // There is a bit of a mismatch between the *caller* of a closure and the *callee*.
        // The caller provides the arguments wrapped up in a tuple:
        //
        //     tuple_tmp = (a, b, c)
        //     Fn::call(closure_ref, tuple_tmp)
        //
        // meanwhile the closure body expects the arguments (here, `a`, `b`, and `c`)
        // as distinct arguments. (This is the "rust-call" ABI hack.) Normally, trans has
        // the job of unpacking this tuple. But here, we are trans. =) So we want to create
        // a vector like
        //
        //     [closure_ref, tuple_tmp.0, tuple_tmp.1, tuple_tmp.2]
        //
        // Except for one tiny wrinkle: we don't actually want `tuple_tmp.0`. It's more
        // convenient if we "spill" that into *another* temporary, so that we can map the
        // argument variable in the callee MIR directly to an argument variable on our side.
        // So we introduce temporaries like:
        //
        //     tmp0 = tuple_tmp.0
        //     tmp1 = tuple_tmp.1
        //     tmp2 = tuple_tmp.2
        //
        // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
        if tcx.is_closure(callsite.callee) {
            let mut args = args.into_iter();
            let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_mir);
            let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_mir);
            assert!(args.next().is_none());

            let tuple = Place::Local(tuple);
            let tuple_tys = if let ty::TyTuple(s) = tuple.ty(caller_mir, tcx).to_ty(tcx).sty {
                s
            } else {
                bug!("Closure arguments are not passed as a tuple");
            };

            // The `closure_ref` in our example above.
            let closure_ref_arg = iter::once(self_);

            // The `tmp0`, `tmp1`, and `tmp2` in our example above.
            let tuple_tmp_args =
                tuple_tys.iter().enumerate().map(|(i, ty)| {
                    // This is e.g. `tuple_tmp.0` in our example above.
                    let tuple_field = Operand::Move(tuple.clone().field(Field::new(i), ty));

                    // Spill to a local to make e.g. `tmp0`.
                    self.create_temp_if_necessary(tuple_field, callsite, caller_mir)
                });

            closure_ref_arg.chain(tuple_tmp_args).collect()
        } else {
            args.into_iter()
                .map(|a| self.create_temp_if_necessary(a, callsite, caller_mir))
                .collect()
        }
    }

    /// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh
    /// temporary `T` and an instruction `T = arg`, and returns `T`.
    fn create_temp_if_necessary(
        &self,
        arg: Operand<'tcx>,
        callsite: &CallSite<'tcx>,
        caller_mir: &mut Mir<'tcx>,
    ) -> Local {
        // FIXME: Analysis of the usage of the arguments to avoid
        // unnecessary temporaries.

        if let Operand::Move(Place::Local(local)) = arg {
            if caller_mir.local_kind(local) == LocalKind::Temp {
                // Reuse the operand if it's a temporary already
                return local;
            }
        }

        debug!("Creating temp for argument {:?}", arg);
        // Otherwise, create a temporary for the arg
        let arg = Rvalue::Use(arg);

        let ty = arg.ty(caller_mir, self.tcx);

        let arg_tmp = LocalDecl::new_temp(ty, callsite.location.span);
        let arg_tmp = caller_mir.local_decls.push(arg_tmp);

        let stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(Place::Local(arg_tmp), arg),
        };
        caller_mir[callsite.bb].statements.push(stmt);
        arg_tmp
    }
}

fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          param_env: ty::ParamEnv<'tcx>,
                          ty: Ty<'tcx>) -> Option<u64> {
    tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes())
}

/**
 * Integrates blocks from the callee function into the calling function.
 * Updates block indices, references to locals and other control flow
 * stuff.
 */
struct Integrator<'a, 'tcx: 'a> {
    block_idx: usize,
    args: &'a [Local],
    local_map: IndexVec<Local, Local>,
    scope_map: IndexVec<VisibilityScope, VisibilityScope>,
    promoted_map: IndexVec<Promoted, Promoted>,
    _callsite: CallSite<'tcx>,
    destination: Place<'tcx>,
    return_block: BasicBlock,
    cleanup_block: Option<BasicBlock>,
    in_cleanup_block: bool,
}
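
// When callee blocks are appended to the caller, every branch target must be
// shifted by the number of blocks the caller had before inlining.
// (Illustration: with 10 caller blocks, `block_idx` is 10 and the callee's
// bb2 becomes bb12.)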
impl<'a, 'tcx> Integrator<'a, 'tcx> {
    fn update_target(&self, tgt: BasicBlock) -> BasicBlock {
        let new = BasicBlock::new(tgt.index() + self.block_idx);
        debug!("Updating target `{:?}`, new: `{:?}`", tgt, new);
        new
    }
}

impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
    fn visit_local(&mut self,
                   local: &mut Local,
                   _ctxt: PlaceContext<'tcx>,
                   _location: Location) {
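        // MIR locals are laid out as [return place, arguments..., vars and
        // temps...], so after handling RETURN_PLACE, `index - 1` selects an
        // argument, and anything past the argument count maps through
        // `local_map`.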
        if *local == RETURN_PLACE {
            match self.destination {
                Place::Local(l) => {
                    *local = l;
                    return;
                },
                ref place => bug!("Return place is {:?}, not local", place)
            }
        }
        let idx = local.index() - 1;
        if idx < self.args.len() {
            *local = self.args[idx];
            return;
        }
        *local = self.local_map[Local::new(idx - self.args.len())];
    }

    fn visit_place(&mut self,
                   place: &mut Place<'tcx>,
                   _ctxt: PlaceContext<'tcx>,
                   _location: Location) {
        if let Place::Local(RETURN_PLACE) = *place {
            // Return pointer; update the place itself
            *place = self.destination.clone();
        } else {
            self.super_place(place, _ctxt, _location);
        }
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        self.in_cleanup_block = data.is_cleanup;
        self.super_basic_block_data(block, data);
        self.in_cleanup_block = false;
    }

    fn visit_terminator_kind(&mut self, block: BasicBlock,
                             kind: &mut TerminatorKind<'tcx>, loc: Location) {
        self.super_terminator_kind(block, kind, loc);

        match *kind {
            TerminatorKind::GeneratorDrop |
            TerminatorKind::Yield { .. } => bug!(),
            TerminatorKind::Goto { ref mut target } => {
                *target = self.update_target(*target);
            }
            TerminatorKind::SwitchInt { ref mut targets, .. } => {
                for tgt in targets {
                    *tgt = self.update_target(*tgt);
                }
            }
            TerminatorKind::Drop { ref mut target, ref mut unwind, .. } |
            TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *unwind {
                    *unwind = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this drop is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *unwind = self.cleanup_block;
                }
            }
            TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
                if let Some((_, ref mut tgt)) = *destination {
                    *tgt = self.update_target(*tgt);
                }
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this call is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this assert is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Return => {
                *kind = TerminatorKind::Goto { target: self.return_block };
            }
            TerminatorKind::Resume => {
                if let Some(tgt) = self.cleanup_block {
                    *kind = TerminatorKind::Goto { target: tgt }
                }
            }
            TerminatorKind::Abort => { }
            TerminatorKind::Unreachable => { }
            TerminatorKind::FalseEdges { ref mut real_target, ref mut imaginary_targets } => {
                *real_target = self.update_target(*real_target);
                for target in imaginary_targets {
                    *target = self.update_target(*target);
                }
            }
            TerminatorKind::FalseUnwind { real_target: _, unwind: _ } =>
                // see the ordering of passes in the optimized_mir query.
                bug!("False unwinds should have been removed before inlining"),
        }
    }

    fn visit_visibility_scope(&mut self, scope: &mut VisibilityScope) {
        *scope = self.scope_map[*scope];
    }

    fn visit_literal(&mut self, literal: &mut Literal<'tcx>, loc: Location) {
        if let Literal::Promoted { ref mut index } = *literal {
            if let Some(p) = self.promoted_map.get(*index).cloned() {
                *index = p;
            }
        }
        self.super_literal(literal, loc);
    }
}