// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Inlining pass for MIR functions
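//!
//! Candidate callsites are collected into a queue; each callee's optimized
//! MIR is fetched through the query system (which detects cycles), a cost
//! heuristic decides whether inlining is worthwhile, and accepted callees
//! are spliced into the caller by the `Integrator` visitor below.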
use rustc::hir;
use rustc::hir::def_id::DefId;

use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};

use rustc::mir::*;
use rustc::mir::visit::*;
use rustc::ty::{self, Instance, Ty, TyCtxt, TypeFoldable};
use rustc::ty::subst::{Subst, Substs};

use std::collections::VecDeque;
use transform::{MirPass, MirSource};
use super::simplify::{remove_dead_blocks, CfgSimplifier};

use syntax::attr;
use syntax::abi::Abi;
const DEFAULT_THRESHOLD: usize = 50;
const HINT_THRESHOLD: usize = 100;

const INSTR_COST: usize = 5;
const CALL_PENALTY: usize = 25;

const UNKNOWN_SIZE_COST: usize = 10;
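
// Note on the cost model used by `should_inline`: ordinary statements cost
// INSTR_COST, calls, asserts and live drops cost CALL_PENALTY, and locals of
// statically unknown size cost UNKNOWN_SIZE_COST; the total is compared
// against HINT_THRESHOLD (with #[inline]) or DEFAULT_THRESHOLD.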
#[derive(Copy, Clone, Debug)]
struct CallSite<'tcx> {
    callee: DefId,
    substs: &'tcx Substs<'tcx>,
    bb: BasicBlock,
    location: SourceInfo,
}
pub struct Inline;

impl MirPass for Inline {
    fn run_pass<'a, 'tcx>(&self,
                          tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          source: MirSource,
                          mir: &mut Mir<'tcx>) {
        if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 {
            Inliner { tcx, source }.run_pass(mir);
        }
    }
}
struct Inliner<'a, 'tcx: 'a> {
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    source: MirSource,
}
impl<'a, 'tcx> Inliner<'a, 'tcx> {
    fn run_pass(&self, caller_mir: &mut Mir<'tcx>) {
        // Keep a queue of callsites to try inlining on. We take
        // advantage of the fact that queries detect cycles here to
        // allow us to try and fetch the fully optimized MIR of a
        // call; if it succeeds, we can inline it and we know that
        // they do not call us. Otherwise, we just don't try to
        // inline.
        //
        // We use a queue so that we inline "broadly" before we inline
        // in depth. It is unclear if this is the best heuristic,
        // really, but that's true of all the heuristics in this
        // file.

        let mut callsites = VecDeque::new();

        let param_env = self.tcx.param_env(self.source.def_id);

        // Only do inlining into fn bodies.
        let id = self.tcx.hir.as_local_node_id(self.source.def_id).unwrap();
        let body_owner_kind = self.tcx.hir.body_owner_kind(id);
        if let (hir::BodyOwnerKind::Fn, None) = (body_owner_kind, self.source.promoted) {
            for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated() {
                // Don't inline calls that are in cleanup blocks.
                if bb_data.is_cleanup { continue; }

                // Only consider direct calls to functions
                let terminator = bb_data.terminator();
                if let TerminatorKind::Call {
                    func: Operand::Constant(ref f), .. } = terminator.kind {
                    if let ty::TyFnDef(callee_def_id, substs) = f.ty.sty {
                        if let Some(instance) = Instance::resolve(self.tcx,
                                                                  param_env,
                                                                  callee_def_id,
                                                                  substs) {
                            callsites.push_back(CallSite {
                                callee: instance.def_id(),
                                substs: instance.substs,
                                bb,
                                location: terminator.source_info
                            });
                        }
                    }
                }
            }
        }

        let mut local_change;
        let mut changed = false;

        loop {
            local_change = false;
            while let Some(callsite) = callsites.pop_front() {
                debug!("checking whether to inline callsite {:?}", callsite);
                if !self.tcx.is_mir_available(callsite.callee) {
                    debug!("checking whether to inline callsite {:?} - MIR unavailable", callsite);
                    continue;
                }

                let callee_mir = match ty::queries::optimized_mir::try_get(self.tcx,
                                                                           callsite.location.span,
                                                                           callsite.callee) {
                    Ok(ref callee_mir) if self.should_inline(callsite, callee_mir) => {
                        subst_and_normalize(callee_mir, self.tcx, &callsite.substs, param_env)
                    }
                    Ok(_) => continue,

                    Err(mut bug) => {
                        // FIXME(#43542) shouldn't have to cancel an error
                        bug.cancel();
                        continue
                    }
                };

                let start = caller_mir.basic_blocks().len();
                debug!("attempting to inline callsite {:?} - mir={:?}", callsite, callee_mir);

                if !self.inline_call(callsite, caller_mir, callee_mir) {
                    debug!("attempting to inline callsite {:?} - failure", callsite);
                    continue;
                }
                debug!("attempting to inline callsite {:?} - success", callsite);

                // Add callsites from inlined function
                for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated().skip(start) {
                    // Only consider direct calls to functions
                    let terminator = bb_data.terminator();
                    if let TerminatorKind::Call {
                        func: Operand::Constant(ref f), .. } = terminator.kind {
                        if let ty::TyFnDef(callee_def_id, substs) = f.ty.sty {
                            // Don't inline the same function multiple times.
                            if callsite.callee != callee_def_id {
                                callsites.push_back(CallSite {
                                    callee: callee_def_id,
                                    substs,
                                    bb,
                                    location: terminator.source_info
                                });
                            }
                        }
                    }
                }

                local_change = true;
                changed = true;
            }

            if !local_change {
                break;
            }
        }

        // Simplify if we inlined anything.
        if changed {
            debug!("Running simplify cfg on {:?}", self.source);
            CfgSimplifier::new(caller_mir).simplify();
            remove_dead_blocks(caller_mir);
        }
    }
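
    /// Decides whether `callsite` should be inlined, checking attributes and
    /// callee shape first and then weighing the cost model against the
    /// threshold.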
    fn should_inline(&self,
                     callsite: CallSite<'tcx>,
                     callee_mir: &Mir<'tcx>)
                     -> bool
    {
        debug!("should_inline({:?})", callsite);
        let tcx = self.tcx;

        // Don't inline closures that have captures
        // FIXME: Handle closures better
        if callee_mir.upvar_decls.len() > 0 {
            debug!("    upvar decls present - not inlining");
            return false;
        }

        // Cannot inline generators which haven't been transformed yet
        if callee_mir.yield_ty.is_some() {
            debug!("    yield ty present - not inlining");
            return false;
        }

        let attrs = tcx.get_attrs(callsite.callee);
        let hint = attr::find_inline_attr(None, &attrs[..]);

        let hinted = match hint {
            // Just treat inline(always) as a hint for now,
            // there are cases that prevent inlining that we
            // need to check for first.
            attr::InlineAttr::Always => true,
            attr::InlineAttr::Never => {
                debug!("#[inline(never)] present - not inlining");
                return false;
            }
            attr::InlineAttr::Hint => true,
            attr::InlineAttr::None => false,
        };

        // Only inline local functions if they would be eligible for cross-crate
        // inlining. This is to ensure that the final crate doesn't have MIR that
        // references unexported symbols
        if callsite.callee.is_local() {
            if callsite.substs.types().count() == 0 && !hinted {
                debug!("    callee is an exported function - not inlining");
                return false;
            }
        }

        let mut threshold = if hinted {
            HINT_THRESHOLD
        } else {
            DEFAULT_THRESHOLD
        };

        // Significantly lower the threshold for inlining cold functions
        if attr::contains_name(&attrs[..], "cold") {
            threshold /= 5;
        }

        // Give a bonus to functions with a small number of blocks;
        // we normally have two or three blocks for even
        // very small functions.
        if callee_mir.basic_blocks().len() <= 3 {
            threshold += threshold / 4;
        }
        debug!("    final inline threshold = {}", threshold);

        // FIXME: Give a bonus to functions with only a single caller

        let param_env = tcx.param_env(self.source.def_id);

        let mut first_block = true;
        let mut cost = 0;

        // Traverse the MIR manually so we can account for the effects of
        // inlining on the CFG.
        let mut work_list = vec![START_BLOCK];
        let mut visited = BitVector::new(callee_mir.basic_blocks().len());
        while let Some(bb) = work_list.pop() {
            if !visited.insert(bb.index()) { continue; }
            let blk = &callee_mir.basic_blocks()[bb];

            for stmt in &blk.statements {
                // Don't count StorageLive/StorageDead in the inlining cost.
                match stmt.kind {
                    StatementKind::StorageLive(_) |
                    StatementKind::StorageDead(_) |
                    StatementKind::Nop => {}
                    _ => cost += INSTR_COST
                }
            }
            let term = blk.terminator();
            let mut is_drop = false;
            match term.kind {
                TerminatorKind::Drop { ref location, target, unwind } |
                TerminatorKind::DropAndReplace { ref location, target, unwind, .. } => {
                    is_drop = true;
                    work_list.push(target);
                    // If the location doesn't actually need dropping, treat it like
                    // a regular goto.
                    let ty = location.ty(callee_mir, tcx).subst(tcx, callsite.substs);
                    let ty = ty.to_ty(tcx);
                    if ty.needs_drop(tcx, param_env) {
                        cost += CALL_PENALTY;
                        if let Some(unwind) = unwind {
                            work_list.push(unwind);
                        }
                    } else {
                        cost += INSTR_COST;
                    }
                }

                TerminatorKind::Unreachable |
                TerminatorKind::Call { destination: None, .. } if first_block => {
                    // If the function always diverges, don't inline
                    // unless the cost is zero
                    threshold = 0;
                }

                TerminatorKind::Call { func: Operand::Constant(ref f), .. } => {
                    if let ty::TyFnDef(def_id, _) = f.ty.sty {
                        // Don't give intrinsics the extra penalty for calls
                        let f = tcx.fn_sig(def_id);
                        if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
                            cost += INSTR_COST;
                        } else {
                            cost += CALL_PENALTY;
                        }
                    }
                }
                TerminatorKind::Assert { .. } => cost += CALL_PENALTY,
                _ => cost += INSTR_COST
            }

            if !is_drop {
                for &succ in &term.successors()[..] {
                    work_list.push(succ);
                }
            }

            first_block = false;
        }

        // Count up the cost of local variables and temps, if we know the size
        // use that, otherwise we use a moderately-large dummy cost.

        let ptr_size = tcx.data_layout.pointer_size.bytes();

        for v in callee_mir.vars_and_temps_iter() {
            let v = &callee_mir.local_decls[v];
            let ty = v.ty.subst(tcx, callsite.substs);
            // Cost of the var is the size in machine-words, if we know
            // the size.
            if let Some(size) = type_size_of(tcx, param_env.clone(), ty) {
                cost += (size / ptr_size) as usize;
            } else {
                cost += UNKNOWN_SIZE_COST;
            }
        }

        if let attr::InlineAttr::Always = hint {
            debug!("INLINING {:?} because inline(always) [cost={}]", callsite, cost);
            true
        } else {
            if cost <= threshold {
                debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold);
                true
            } else {
                debug!("NOT inlining {:?} [cost={} > threshold={}]", callsite, cost, threshold);
                false
            }
        }
    }
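
    /// Splices `callee_mir` into `caller_mir` at `callsite`: remaps scopes,
    /// locals and promoteds, then rewrites the call terminator into a goto to
    /// the first inlined block. Returns false when the terminator is not an
    /// inlinable call.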
    fn inline_call(&self,
                   callsite: CallSite<'tcx>,
                   caller_mir: &mut Mir<'tcx>,
                   mut callee_mir: Mir<'tcx>) -> bool {
        let terminator = caller_mir[callsite.bb].terminator.take().unwrap();
        match terminator.kind {
            // FIXME: Handle inlining of diverging calls
            TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
                debug!("Inlined {:?} into {:?}", callsite.callee, self.source);

                let is_box_free = Some(callsite.callee) == self.tcx.lang_items().box_free_fn();

                let mut local_map = IndexVec::with_capacity(callee_mir.local_decls.len());
                let mut scope_map = IndexVec::with_capacity(callee_mir.visibility_scopes.len());
                let mut promoted_map = IndexVec::with_capacity(callee_mir.promoted.len());

                for mut scope in callee_mir.visibility_scopes.iter().cloned() {
                    if scope.parent_scope.is_none() {
                        scope.parent_scope = Some(callsite.location.scope);
                        scope.span = callee_mir.span;
                    } else {
                        scope.span = callsite.location.span;
                    }

                    let idx = caller_mir.visibility_scopes.push(scope);
                    scope_map.push(idx);
                }

                for loc in callee_mir.vars_and_temps_iter() {
                    let mut local = callee_mir.local_decls[loc].clone();

                    local.source_info.scope = scope_map[local.source_info.scope];
                    local.source_info.span = callsite.location.span;

                    let idx = caller_mir.local_decls.push(local);
                    local_map.push(idx);
                }

                for p in callee_mir.promoted.iter().cloned() {
                    let idx = caller_mir.promoted.push(p);
                    promoted_map.push(idx);
                }

                // If the call is something like `a[*i] = f(i)`, where
                // `i : &mut usize`, then just duplicating the `a[*i]`
                // Lvalue could result in two different locations if `f`
                // writes to `i`. To prevent this we need to create a temporary
                // borrow of the lvalue and pass the destination as `*temp` instead.
                fn dest_needs_borrow(lval: &Lvalue) -> bool {
                    match *lval {
                        Lvalue::Projection(ref p) => {
                            match p.elem {
                                ProjectionElem::Deref |
                                ProjectionElem::Index(_) => true,
                                _ => dest_needs_borrow(&p.base)
                            }
                        }
                        // Static variables need a borrow because the callee
                        // might modify the same static.
                        Lvalue::Static(_) => true,
                        _ => false
                    }
                }

                let dest = if dest_needs_borrow(&destination.0) {
                    debug!("Creating temp for return destination");
                    let dest = Rvalue::Ref(
                        self.tcx.types.re_erased,
                        BorrowKind::Mut,
                        destination.0);

                    let ty = dest.ty(caller_mir, self.tcx);

                    let temp = LocalDecl::new_temp(ty, callsite.location.span);

                    let tmp = caller_mir.local_decls.push(temp);
                    let tmp = Lvalue::Local(tmp);

                    let stmt = Statement {
                        source_info: callsite.location,
                        kind: StatementKind::Assign(tmp.clone(), dest)
                    };
                    caller_mir[callsite.bb]
                        .statements.push(stmt);
                    tmp.deref()
                } else {
                    destination.0
                };

                let return_block = destination.1;

                let args: Vec<_> = if is_box_free {
                    assert!(args.len() == 1);
                    // box_free takes a Box, but is defined with a *mut T, inlining
                    // needs to generate the cast.
                    // FIXME: we should probably just generate correct MIR in the first place...

                    let arg = if let Operand::Consume(ref lval) = args[0] {
                        lval.clone()
                    } else {
                        bug!("Constant arg to \"box_free\"");
                    };

                    let ptr_ty = args[0].ty(caller_mir, self.tcx);
                    vec![self.cast_box_free_arg(arg, ptr_ty, &callsite, caller_mir)]
                } else {
                    // Copy the arguments if needed.
                    self.make_call_args(args, &callsite, caller_mir)
                };

                let bb_len = caller_mir.basic_blocks().len();
                let mut integrator = Integrator {
                    block_idx: bb_len,
                    args: &args,
                    local_map,
                    scope_map,
                    promoted_map,
                    _callsite: callsite,
                    destination: dest,
                    return_block,
                    cleanup_block: cleanup,
                    in_cleanup_block: false
                };

                for (bb, mut block) in callee_mir.basic_blocks_mut().drain_enumerated(..) {
                    integrator.visit_basic_block_data(bb, &mut block);
                    caller_mir.basic_blocks_mut().push(block);
                }

                let terminator = Terminator {
                    source_info: callsite.location,
                    kind: TerminatorKind::Goto { target: BasicBlock::new(bb_len) }
                };

                caller_mir[callsite.bb].terminator = Some(terminator);

                true
            }
            kind => {
                caller_mir[callsite.bb].terminator = Some(Terminator {
                    source_info: terminator.source_info,
                    kind,
                });
                false
            }
        }
    }
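
    /// Helper for the `box_free` special case above: re-borrows the boxed
    /// contents and casts that borrow to the `*mut T` the callee expects.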
    fn cast_box_free_arg(&self, arg: Lvalue<'tcx>, ptr_ty: Ty<'tcx>,
                         callsite: &CallSite<'tcx>, caller_mir: &mut Mir<'tcx>) -> Operand<'tcx> {
        let arg = Rvalue::Ref(
            self.tcx.types.re_erased,
            BorrowKind::Mut,
            arg.deref());

        let ty = arg.ty(caller_mir, self.tcx);
        let ref_tmp = LocalDecl::new_temp(ty, callsite.location.span);
        let ref_tmp = caller_mir.local_decls.push(ref_tmp);
        let ref_tmp = Lvalue::Local(ref_tmp);

        let ref_stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(ref_tmp.clone(), arg)
        };

        caller_mir[callsite.bb]
            .statements.push(ref_stmt);

        let pointee_ty = match ptr_ty.sty {
            ty::TyRawPtr(tm) | ty::TyRef(_, tm) => tm.ty,
            _ if ptr_ty.is_box() => ptr_ty.boxed_ty(),
            _ => bug!("Invalid type `{:?}` for call to box_free", ptr_ty)
        };
        let ptr_ty = self.tcx.mk_mut_ptr(pointee_ty);

        let raw_ptr = Rvalue::Cast(CastKind::Misc, Operand::Consume(ref_tmp), ptr_ty);

        let cast_tmp = LocalDecl::new_temp(ptr_ty, callsite.location.span);
        let cast_tmp = caller_mir.local_decls.push(cast_tmp);
        let cast_tmp = Lvalue::Local(cast_tmp);

        let cast_stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(cast_tmp.clone(), raw_ptr)
        };

        caller_mir[callsite.bb]
            .statements.push(cast_stmt);

        Operand::Consume(cast_tmp)
    }
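
    /// Copies call arguments into temporaries where needed. For a closure
    /// callee, which receives its environment plus an argument tuple, the
    /// tuple is also unpacked into per-field operands; illustratively, a call
    /// arriving as `[env, (x, y)]` becomes `[env, tup.0, tup.1]`.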
    fn make_call_args(
        &self,
        args: Vec<Operand<'tcx>>,
        callsite: &CallSite<'tcx>,
        caller_mir: &mut Mir<'tcx>,
    ) -> Vec<Operand<'tcx>> {
        let tcx = self.tcx;

        // A closure is passed its self-type and a tuple like `(arg1, arg2, ...)`,
        // hence mappings to tuple fields are needed.
        if tcx.is_closure(callsite.callee) {
            let mut args = args.into_iter();
            let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_mir);
            let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_mir);
            assert!(args.next().is_none());

            let tuple_tys = if let ty::TyTuple(s, _) = tuple.ty(caller_mir, tcx).to_ty(tcx).sty {
                s
            } else {
                bug!("Closure arguments are not passed as a tuple");
            };

            let mut res = Vec::with_capacity(1 + tuple_tys.len());
            res.push(Operand::Consume(self_));
            res.extend(tuple_tys.iter().enumerate().map(|(i, ty)| {
                Operand::Consume(tuple.clone().field(Field::new(i), ty))
            }));

            res
        } else {
            args.into_iter()
                .map(|a| Operand::Consume(self.create_temp_if_necessary(a, callsite, caller_mir)))
                .collect()
        }
    }
    /// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh
    /// temporary `T` and an instruction `T = arg`, and returns `T`.
    fn create_temp_if_necessary(
        &self,
        arg: Operand<'tcx>,
        callsite: &CallSite<'tcx>,
        caller_mir: &mut Mir<'tcx>,
    ) -> Lvalue<'tcx> {
        // FIXME: Analysis of the usage of the arguments to avoid
        // unnecessary temporaries.

        if let Operand::Consume(Lvalue::Local(local)) = arg {
            if caller_mir.local_kind(local) == LocalKind::Temp {
                // Reuse the operand if it's a temporary already
                return Lvalue::Local(local);
            }
        }

        debug!("Creating temp for argument {:?}", arg);
        // Otherwise, create a temporary for the arg
        let arg = Rvalue::Use(arg);

        let ty = arg.ty(caller_mir, self.tcx);

        let arg_tmp = LocalDecl::new_temp(ty, callsite.location.span);
        let arg_tmp = caller_mir.local_decls.push(arg_tmp);
        let arg_tmp = Lvalue::Local(arg_tmp);

        let stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(arg_tmp.clone(), arg),
        };
        caller_mir[callsite.bb].statements.push(stmt);
        arg_tmp
    }
}
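
/// Size of `ty` in bytes, or `None` if its layout cannot be computed.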
fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          param_env: ty::ParamEnv<'tcx>,
                          ty: Ty<'tcx>) -> Option<u64> {
    ty.layout(tcx, param_env).ok().map(|layout| {
        layout.size(&tcx.data_layout).bytes()
    })
}
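
/// Folds the callsite substitutions into every type in `mir`, normalizing
/// with `param_env`, so the returned body is valid in the caller's context.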
fn subst_and_normalize<'a, 'tcx: 'a>(
    mir: &Mir<'tcx>,
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    substs: &'tcx ty::subst::Substs<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
) -> Mir<'tcx> {
    struct Folder<'a, 'tcx: 'a> {
        tcx: TyCtxt<'a, 'tcx, 'tcx>,
        param_env: ty::ParamEnv<'tcx>,
        substs: &'tcx ty::subst::Substs<'tcx>,
    }
    impl<'a, 'tcx: 'a> ty::fold::TypeFolder<'tcx, 'tcx> for Folder<'a, 'tcx> {
        fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
            self.tcx
        }

        fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
            self.tcx.trans_apply_param_substs_env(&self.substs, self.param_env, &t)
        }
    }
    let mut f = Folder { tcx, param_env, substs };
    mir.fold_with(&mut f)
}
/**
 * Integrates blocks from the callee function into the calling function.
 * Updates block indices, references to locals and other control flow
 * stuff.
 */
struct Integrator<'a, 'tcx: 'a> {
    block_idx: usize,
    args: &'a [Operand<'tcx>],
    local_map: IndexVec<Local, Local>,
    scope_map: IndexVec<VisibilityScope, VisibilityScope>,
    promoted_map: IndexVec<Promoted, Promoted>,
    _callsite: CallSite<'tcx>,
    destination: Lvalue<'tcx>,
    return_block: BasicBlock,
    cleanup_block: Option<BasicBlock>,
    in_cleanup_block: bool,
}
impl<'a, 'tcx> Integrator<'a, 'tcx> {
    fn update_target(&self, tgt: BasicBlock) -> BasicBlock {
        let new = BasicBlock::new(tgt.index() + self.block_idx);
        debug!("Updating target `{:?}`, new: `{:?}`", tgt, new);
        new
    }

    fn arg_index(&self, arg: Local) -> Option<usize> {
        let idx = arg.index();
        if idx > 0 && idx <= self.args.len() {
            Some(idx - 1)
        } else {
            None
        }
    }
}
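
// Callee locals are numbered with the return pointer first, then the
// arguments, then vars and temps; `visit_local` below relies on that layout
// when remapping them into the caller.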
impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
    fn visit_local(&mut self,
                   local: &mut Local,
                   _ctxt: LvalueContext<'tcx>,
                   _location: Location) {
        if *local == RETURN_POINTER {
            match self.destination {
                Lvalue::Local(l) => {
                    *local = l;
                    return;
                },
                ref lval => bug!("Return lvalue is {:?}, not local", lval)
            }
        }
        let idx = local.index() - 1;
        if idx < self.args.len() {
            match self.args[idx] {
                Operand::Consume(Lvalue::Local(l)) => {
                    *local = l;
                    return;
                },
                ref op => bug!("Arg operand `{:?}` is {:?}, not local", idx, op)
            }
        }
        *local = self.local_map[Local::new(idx - self.args.len())];
    }
    fn visit_lvalue(&mut self,
                    lvalue: &mut Lvalue<'tcx>,
                    _ctxt: LvalueContext<'tcx>,
                    _location: Location) {
        if let Lvalue::Local(RETURN_POINTER) = *lvalue {
            // Return pointer; update the lvalue itself
            *lvalue = self.destination.clone();
        } else {
            self.super_lvalue(lvalue, _ctxt, _location);
        }
    }
    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        if let Operand::Consume(Lvalue::Local(arg)) = *operand {
            if let Some(idx) = self.arg_index(arg) {
                let new_arg = self.args[idx].clone();
                *operand = new_arg;
                return;
            }
        }
        self.super_operand(operand, location);
    }
    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        self.in_cleanup_block = data.is_cleanup;
        self.super_basic_block_data(block, data);
        self.in_cleanup_block = false;
    }
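
    // Two fixups per terminator: every block target is shifted by `block_idx`,
    // and edges that can unwind but lack a cleanup target are pointed at the
    // original call's cleanup block.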
    fn visit_terminator_kind(&mut self, block: BasicBlock,
                             kind: &mut TerminatorKind<'tcx>, loc: Location) {
        self.super_terminator_kind(block, kind, loc);

        match *kind {
            TerminatorKind::GeneratorDrop |
            TerminatorKind::Yield { .. } => bug!(),
            TerminatorKind::Goto { ref mut target } => {
                *target = self.update_target(*target);
            }
            TerminatorKind::SwitchInt { ref mut targets, .. } => {
                for tgt in targets {
                    *tgt = self.update_target(*tgt);
                }
            }
            TerminatorKind::Drop { ref mut target, ref mut unwind, .. } |
            TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *unwind {
                    *unwind = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this drop is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *unwind = self.cleanup_block;
                }
            }
            TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
                if let Some((_, ref mut tgt)) = *destination {
                    *tgt = self.update_target(*tgt);
                }
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this call is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this assert is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Return => {
                *kind = TerminatorKind::Goto { target: self.return_block };
            }
            TerminatorKind::Resume => {
                if let Some(tgt) = self.cleanup_block {
                    *kind = TerminatorKind::Goto { target: tgt }
                }
            }
            TerminatorKind::Unreachable => { }
            TerminatorKind::FalseEdges { ref mut real_target, ref mut imaginary_targets } => {
                *real_target = self.update_target(*real_target);
                for target in imaginary_targets {
                    *target = self.update_target(*target);
                }
            }
        }
    }
    fn visit_visibility_scope(&mut self, scope: &mut VisibilityScope) {
        *scope = self.scope_map[*scope];
    }
    fn visit_literal(&mut self, literal: &mut Literal<'tcx>, loc: Location) {
        if let Literal::Promoted { ref mut index } = *literal {
            if let Some(p) = self.promoted_map.get(*index).cloned() {
                *index = p;
            }
        }
        self.super_literal(literal, loc);
    }
}