src/librustc_mir/transform/inline.rs (rustc 1.26.0)
1 // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! Inlining pass for MIR functions
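//!
//! A grounded summary of the code below: the pass runs only at `-Z mir-opt-level=2`
//! or higher. It scans the caller's basic blocks for direct calls, estimates the
//! cost of each callee's optimized MIR against a threshold (raised for `#[inline]`
//! callees, lowered for `#[cold]` ones), and, when a call is worth inlining,
//! splices the callee's blocks into the caller and rewrites the `Call` terminator
//! into a `Goto` into the inlined body.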
12
13 use rustc::hir;
14 use rustc::hir::TransFnAttrFlags;
15 use rustc::hir::def_id::DefId;
16
17 use rustc_data_structures::bitvec::BitVector;
18 use rustc_data_structures::indexed_vec::{Idx, IndexVec};
19
20 use rustc::mir::*;
21 use rustc::mir::visit::*;
22 use rustc::ty::{self, Instance, Ty, TyCtxt};
23 use rustc::ty::subst::{Subst, Substs};
24
25 use std::collections::VecDeque;
26 use std::iter;
27 use transform::{MirPass, MirSource};
28 use super::simplify::{remove_dead_blocks, CfgSimplifier};
29
30 use syntax::{attr};
31 use syntax::abi::Abi;
32
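// Cost model used by `should_inline`: each retained statement costs INSTR_COST;
// calls, asserts, and drops that may run user code cost CALL_PENALTY; locals whose
// size cannot be computed are charged UNKNOWN_SIZE_COST. The total is compared
// against DEFAULT_THRESHOLD, or HINT_THRESHOLD for `#[inline]` callees.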
33 const DEFAULT_THRESHOLD: usize = 50;
34 const HINT_THRESHOLD: usize = 100;
35
36 const INSTR_COST: usize = 5;
37 const CALL_PENALTY: usize = 25;
38
39 const UNKNOWN_SIZE_COST: usize = 10;
40
41 pub struct Inline;
42
43 #[derive(Copy, Clone, Debug)]
44 struct CallSite<'tcx> {
45 callee: DefId,
46 substs: &'tcx Substs<'tcx>,
47 bb: BasicBlock,
48 location: SourceInfo,
49 }
50
51 impl MirPass for Inline {
52 fn run_pass<'a, 'tcx>(&self,
53 tcx: TyCtxt<'a, 'tcx, 'tcx>,
54 source: MirSource,
55 mir: &mut Mir<'tcx>) {
56 if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 {
57 Inliner { tcx, source }.run_pass(mir);
58 }
59 }
60 }
61
62 struct Inliner<'a, 'tcx: 'a> {
63 tcx: TyCtxt<'a, 'tcx, 'tcx>,
64 source: MirSource,
65 }
66
67 impl<'a, 'tcx> Inliner<'a, 'tcx> {
68 fn run_pass(&self, caller_mir: &mut Mir<'tcx>) {
69 // Keep a queue of callsites to try inlining on. We take
70 // advantage of the fact that queries detect cycles here to
71 // allow us to try and fetch the fully optimized MIR of a
72 // call; if it succeeds, we can inline it and we know that
73 // they do not call us. Otherwise, we just don't try to
74 // inline.
75 //
76 // We use a queue so that we inline "broadly" before we inline
77 // in depth. It is unclear if this is the best heuristic,
78 // really, but that's true of all the heuristics in this
79 // file. =)
80
81 let mut callsites = VecDeque::new();
82
83 let param_env = self.tcx.param_env(self.source.def_id);
84
85 // Only do inlining into fn bodies.
86 let id = self.tcx.hir.as_local_node_id(self.source.def_id).unwrap();
87 let body_owner_kind = self.tcx.hir.body_owner_kind(id);
88 if let (hir::BodyOwnerKind::Fn, None) = (body_owner_kind, self.source.promoted) {
89
90 for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated() {
91 // Don't inline calls that are in cleanup blocks.
92 if bb_data.is_cleanup { continue; }
93
94 // Only consider direct calls to functions
95 let terminator = bb_data.terminator();
96 if let TerminatorKind::Call {
97 func: Operand::Constant(ref f), .. } = terminator.kind {
98 if let ty::TyFnDef(callee_def_id, substs) = f.ty.sty {
99 if let Some(instance) = Instance::resolve(self.tcx,
100 param_env,
101 callee_def_id,
102 substs) {
103 callsites.push_back(CallSite {
104 callee: instance.def_id(),
105 substs: instance.substs,
106 bb,
107 location: terminator.source_info
108 });
109 }
110 }
111 }
112 }
113 } else {
114 return;
115 }
116
117 let mut local_change;
118 let mut changed = false;
119
120 loop {
121 local_change = false;
122 while let Some(callsite) = callsites.pop_front() {
123 debug!("checking whether to inline callsite {:?}", callsite);
124 if !self.tcx.is_mir_available(callsite.callee) {
125 debug!("checking whether to inline callsite {:?} - MIR unavailable", callsite);
126 continue;
127 }
128
129 let callee_mir = match ty::queries::optimized_mir::try_get(self.tcx,
130 callsite.location.span,
131 callsite.callee) {
132 Ok(callee_mir) if self.should_inline(callsite, callee_mir) => {
133 self.tcx.subst_and_normalize_erasing_regions(
134 &callsite.substs,
135 param_env,
136 callee_mir,
137 )
138 }
139 Ok(_) => continue,
140
141 Err(mut bug) => {
142 // FIXME(#43542) shouldn't have to cancel an error
143 bug.cancel();
144 continue
145 }
146 };
147
148 let start = caller_mir.basic_blocks().len();
149 debug!("attempting to inline callsite {:?} - mir={:?}", callsite, callee_mir);
150 if !self.inline_call(callsite, caller_mir, callee_mir) {
151 debug!("attempting to inline callsite {:?} - failure", callsite);
152 continue;
153 }
154 debug!("attempting to inline callsite {:?} - success", callsite);
155
156 // Add callsites from inlined function
157 for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated().skip(start) {
158 // Only consider direct calls to functions
159 let terminator = bb_data.terminator();
160 if let TerminatorKind::Call {
161 func: Operand::Constant(ref f), .. } = terminator.kind {
162 if let ty::TyFnDef(callee_def_id, substs) = f.ty.sty {
163 // Don't re-queue calls the inlined body makes to itself,
164 // to avoid inlining recursively without bound.
164 if callsite.callee != callee_def_id {
165 callsites.push_back(CallSite {
166 callee: callee_def_id,
167 substs,
168 bb,
169 location: terminator.source_info
170 });
171 }
172 }
173 }
174 }
175
176 local_change = true;
177 changed = true;
178 }
179
180 if !local_change {
181 break;
182 }
183 }
184
185 // Simplify if we inlined anything.
186 if changed {
187 debug!("Running simplify cfg on {:?}", self.source);
188 CfgSimplifier::new(caller_mir).simplify();
189 remove_dead_blocks(caller_mir);
190 }
191 }
192
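    /// Decides whether the call at `callsite` is worth inlining, based on the
    /// callee's attributes and an estimated cost of its MIR. This is purely a
    /// heuristic; see the cost constants at the top of the file.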
193 fn should_inline(&self,
194 callsite: CallSite<'tcx>,
195 callee_mir: &Mir<'tcx>)
196 -> bool
197 {
198 debug!("should_inline({:?})", callsite);
199 let tcx = self.tcx;
200
201 // Don't inline closures that have captures
202 // FIXME: Handle closures better
203 if callee_mir.upvar_decls.len() > 0 {
204 debug!(" upvar decls present - not inlining");
205 return false;
206 }
207
208 // Cannot inline generators which haven't been transformed yet
209 if callee_mir.yield_ty.is_some() {
210 debug!(" yield ty present - not inlining");
211 return false;
212 }
213
214 // Do not inline {u,i}128 lang items; trans const eval depends
215 // on detecting calls to these lang items and intercepting them.
216 if tcx.is_binop_lang_item(callsite.callee).is_some() {
217 debug!(" not inlining 128bit integer lang item");
218 return false;
219 }
220
221 let trans_fn_attrs = tcx.trans_fn_attrs(callsite.callee);
222
223 let hinted = match trans_fn_attrs.inline {
224 // Just treat inline(always) as a hint for now,
225 // there are cases that prevent inlining that we
226 // need to check for first.
227 attr::InlineAttr::Always => true,
228 attr::InlineAttr::Never => {
229 debug!("#[inline(never)] present - not inlining");
230 return false
231 }
232 attr::InlineAttr::Hint => true,
233 attr::InlineAttr::None => false,
234 };
235
236 // Only inline local functions if they would be eligible for cross-crate
237 // inlining. This is to ensure that the final crate doesn't have MIR that
238 // references unexported symbols.
239 if callsite.callee.is_local() {
240 if callsite.substs.types().count() == 0 && !hinted {
241 debug!(" callee is an exported function - not inlining");
242 return false;
243 }
244 }
245
246 let mut threshold = if hinted {
247 HINT_THRESHOLD
248 } else {
249 DEFAULT_THRESHOLD
250 };
251
252 // Significantly lower the threshold for inlining cold functions
253 if trans_fn_attrs.flags.contains(TransFnAttrFlags::COLD) {
254 threshold /= 5;
255 }
256
257 // Give a bonus to functions with a small number of blocks;
258 // we normally have two or three blocks for even
259 // very small functions.
260 if callee_mir.basic_blocks().len() <= 3 {
261 threshold += threshold / 4;
262 }
263 debug!(" final inline threshold = {}", threshold);
264
265 // FIXME: Give a bonus to functions with only a single caller
266
267 let param_env = tcx.param_env(self.source.def_id);
268
269 let mut first_block = true;
270 let mut cost = 0;
271
272 // Traverse the MIR manually so we can account for the effects of
273 // inlining on the CFG.
274 let mut work_list = vec![START_BLOCK];
275 let mut visited = BitVector::new(callee_mir.basic_blocks().len());
276 while let Some(bb) = work_list.pop() {
277 if !visited.insert(bb.index()) { continue; }
278 let blk = &callee_mir.basic_blocks()[bb];
279
280 for stmt in &blk.statements {
281 // Don't count StorageLive/StorageDead in the inlining cost.
282 match stmt.kind {
283 StatementKind::StorageLive(_) |
284 StatementKind::StorageDead(_) |
285 StatementKind::Nop => {}
286 _ => cost += INSTR_COST
287 }
288 }
289 let term = blk.terminator();
290 let mut is_drop = false;
291 match term.kind {
292 TerminatorKind::Drop { ref location, target, unwind } |
293 TerminatorKind::DropAndReplace { ref location, target, unwind, .. } => {
294 is_drop = true;
295 work_list.push(target);
296 // If the location doesn't actually need dropping, treat it like
297 // a regular goto.
298 let ty = location.ty(callee_mir, tcx).subst(tcx, callsite.substs);
299 let ty = ty.to_ty(tcx);
300 if ty.needs_drop(tcx, param_env) {
301 cost += CALL_PENALTY;
302 if let Some(unwind) = unwind {
303 work_list.push(unwind);
304 }
305 } else {
306 cost += INSTR_COST;
307 }
308 }
309
310 TerminatorKind::Unreachable |
311 TerminatorKind::Call { destination: None, .. } if first_block => {
312 // If the function always diverges, don't inline
313 // unless the cost is zero
314 threshold = 0;
315 }
316
317 TerminatorKind::Call {func: Operand::Constant(ref f), .. } => {
318 if let ty::TyFnDef(def_id, _) = f.ty.sty {
319 // Don't give intrinsics the extra penalty for calls
320 let f = tcx.fn_sig(def_id);
321 if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
322 cost += INSTR_COST;
323 } else {
324 cost += CALL_PENALTY;
325 }
326 }
327 }
328 TerminatorKind::Assert { .. } => cost += CALL_PENALTY,
329 _ => cost += INSTR_COST
330 }
331
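// Drop terminators already queued the successors we care about above; every
// other terminator gets all of its successors visited.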
332 if !is_drop {
333 for &succ in &term.successors()[..] {
334 work_list.push(succ);
335 }
336 }
337
338 first_block = false;
339 }
340
341 // Count up the cost of local variables and temps; if we know the size,
342 // use that, otherwise use a moderately-large dummy cost.
343
344 let ptr_size = tcx.data_layout.pointer_size.bytes();
345
346 for v in callee_mir.vars_and_temps_iter() {
347 let v = &callee_mir.local_decls[v];
348 let ty = v.ty.subst(tcx, callsite.substs);
349 // Cost of the var is the size in machine-words, if we know
350 // it.
351 if let Some(size) = type_size_of(tcx, param_env.clone(), ty) {
352 cost += (size / ptr_size) as usize;
353 } else {
354 cost += UNKNOWN_SIZE_COST;
355 }
356 }
357
358 if let attr::InlineAttr::Always = trans_fn_attrs.inline {
359 debug!("INLINING {:?} because inline(always) [cost={}]", callsite, cost);
360 true
361 } else {
362 if cost <= threshold {
363 debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold);
364 true
365 } else {
366 debug!("NOT inlining {:?} [cost={} > threshold={}]", callsite, cost, threshold);
367 false
368 }
369 }
370 }
371
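    /// Splices `callee_mir` into `caller_mir` at `callsite`. Returns `false` and
    /// restores the original terminator if the call is not of a shape this pass
    /// can inline (e.g. a diverging call with no destination).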
372 fn inline_call(&self,
373 callsite: CallSite<'tcx>,
374 caller_mir: &mut Mir<'tcx>,
375 mut callee_mir: Mir<'tcx>) -> bool {
376 let terminator = caller_mir[callsite.bb].terminator.take().unwrap();
377 match terminator.kind {
378 // FIXME: Handle inlining of diverging calls
379 TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
380 debug!("Inlined {:?} into {:?}", callsite.callee, self.source);
381
382 let is_box_free = Some(callsite.callee) == self.tcx.lang_items().box_free_fn();
383
384 let mut local_map = IndexVec::with_capacity(callee_mir.local_decls.len());
385 let mut scope_map = IndexVec::with_capacity(callee_mir.visibility_scopes.len());
386 let mut promoted_map = IndexVec::with_capacity(callee_mir.promoted.len());
387
388 for mut scope in callee_mir.visibility_scopes.iter().cloned() {
389 if scope.parent_scope.is_none() {
390 scope.parent_scope = Some(callsite.location.scope);
391 scope.span = callee_mir.span;
392 }
393
394 scope.span = callsite.location.span;
395
396 let idx = caller_mir.visibility_scopes.push(scope);
397 scope_map.push(idx);
398 }
399
400 for loc in callee_mir.vars_and_temps_iter() {
401 let mut local = callee_mir.local_decls[loc].clone();
402
403 local.source_info.scope = scope_map[local.source_info.scope];
404 local.source_info.span = callsite.location.span;
405
406 let idx = caller_mir.local_decls.push(local);
407 local_map.push(idx);
408 }
409
410 for p in callee_mir.promoted.iter().cloned() {
411 let idx = caller_mir.promoted.push(p);
412 promoted_map.push(idx);
413 }
414
415 // If the call is something like `a[*i] = f(i)`, where
416 // `i : &mut usize`, then just duplicating the `a[*i]`
417 // Place could result in two different locations if `f`
418 // writes to `i`. To prevent this we need to create a temporary
419 // borrow of the place and pass the destination as `*temp` instead.
420 fn dest_needs_borrow(place: &Place) -> bool {
421 match *place {
422 Place::Projection(ref p) => {
423 match p.elem {
424 ProjectionElem::Deref |
425 ProjectionElem::Index(_) => true,
426 _ => dest_needs_borrow(&p.base)
427 }
428 }
429 // Static variables need a borrow because the callee
430 // might modify the same static.
431 Place::Static(_) => true,
432 _ => false
433 }
434 }
435
436 let dest = if dest_needs_borrow(&destination.0) {
437 debug!("Creating temp for return destination");
438 let dest = Rvalue::Ref(
439 self.tcx.types.re_erased,
440 BorrowKind::Mut { allow_two_phase_borrow: false },
441 destination.0);
442
443 let ty = dest.ty(caller_mir, self.tcx);
444
445 let temp = LocalDecl::new_temp(ty, callsite.location.span);
446
447 let tmp = caller_mir.local_decls.push(temp);
448 let tmp = Place::Local(tmp);
449
450 let stmt = Statement {
451 source_info: callsite.location,
452 kind: StatementKind::Assign(tmp.clone(), dest)
453 };
454 caller_mir[callsite.bb]
455 .statements.push(stmt);
456 tmp.deref()
457 } else {
458 destination.0
459 };
460
461 let return_block = destination.1;
462
463 let args : Vec<_> = if is_box_free {
464 assert!(args.len() == 1);
465 // box_free takes a Box, but is defined with a *mut T; inlining
466 // needs to generate the cast.
467 // FIXME: we should probably just generate correct MIR in the first place...
468
469 let arg = if let Operand::Move(ref place) = args[0] {
470 place.clone()
471 } else {
472 bug!("Constant arg to \"box_free\"");
473 };
474
475 let ptr_ty = args[0].ty(caller_mir, self.tcx);
476 vec![self.cast_box_free_arg(arg, ptr_ty, &callsite, caller_mir)]
477 } else {
478 // Copy the arguments if needed.
479 self.make_call_args(args, &callsite, caller_mir)
480 };
481
482 let bb_len = caller_mir.basic_blocks().len();
483 let mut integrator = Integrator {
484 block_idx: bb_len,
485 args: &args,
486 local_map,
487 scope_map,
488 promoted_map,
489 _callsite: callsite,
490 destination: dest,
491 return_block,
492 cleanup_block: cleanup,
493 in_cleanup_block: false
494 };
495
496
497 for (bb, mut block) in callee_mir.basic_blocks_mut().drain_enumerated(..) {
498 integrator.visit_basic_block_data(bb, &mut block);
499 caller_mir.basic_blocks_mut().push(block);
500 }
501
502 let terminator = Terminator {
503 source_info: callsite.location,
504 kind: TerminatorKind::Goto { target: BasicBlock::new(bb_len) }
505 };
506
507 caller_mir[callsite.bb].terminator = Some(terminator);
508
509 true
510 }
511 kind => {
512 caller_mir[callsite.bb].terminator = Some(Terminator {
513 source_info: terminator.source_info,
514 kind,
515 });
516 false
517 }
518 }
519 }
520
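    /// `box_free` is defined to take a raw pointer, while the call site passes a
    /// `Box`-typed operand. This emits `ref_tmp = &mut *arg` followed by
    /// `cast_tmp = ref_tmp as *mut T` into the caller and returns `cast_tmp`, so
    /// the inlined body receives the type it expects.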
521 fn cast_box_free_arg(&self, arg: Place<'tcx>, ptr_ty: Ty<'tcx>,
522 callsite: &CallSite<'tcx>, caller_mir: &mut Mir<'tcx>) -> Local {
523 let arg = Rvalue::Ref(
524 self.tcx.types.re_erased,
525 BorrowKind::Mut { allow_two_phase_borrow: false },
526 arg.deref());
527
528 let ty = arg.ty(caller_mir, self.tcx);
529 let ref_tmp = LocalDecl::new_temp(ty, callsite.location.span);
530 let ref_tmp = caller_mir.local_decls.push(ref_tmp);
531 let ref_tmp = Place::Local(ref_tmp);
532
533 let ref_stmt = Statement {
534 source_info: callsite.location,
535 kind: StatementKind::Assign(ref_tmp.clone(), arg)
536 };
537
538 caller_mir[callsite.bb]
539 .statements.push(ref_stmt);
540
541 let pointee_ty = match ptr_ty.sty {
542 ty::TyRawPtr(tm) | ty::TyRef(_, tm) => tm.ty,
543 _ if ptr_ty.is_box() => ptr_ty.boxed_ty(),
544 _ => bug!("Invalid type `{:?}` for call to box_free", ptr_ty)
545 };
546 let ptr_ty = self.tcx.mk_mut_ptr(pointee_ty);
547
548 let raw_ptr = Rvalue::Cast(CastKind::Misc, Operand::Move(ref_tmp), ptr_ty);
549
550 let cast_tmp = LocalDecl::new_temp(ptr_ty, callsite.location.span);
551 let cast_tmp = caller_mir.local_decls.push(cast_tmp);
552
553 let cast_stmt = Statement {
554 source_info: callsite.location,
555 kind: StatementKind::Assign(Place::Local(cast_tmp), raw_ptr)
556 };
557
558 caller_mir[callsite.bb]
559 .statements.push(cast_stmt);
560
561 cast_tmp
562 }
563
564 fn make_call_args(
565 &self,
566 args: Vec<Operand<'tcx>>,
567 callsite: &CallSite<'tcx>,
568 caller_mir: &mut Mir<'tcx>,
569 ) -> Vec<Local> {
570 let tcx = self.tcx;
571
572 // There is a bit of a mismatch between the *caller* of a closure and the *callee*.
573 // The caller provides the arguments wrapped up in a tuple:
574 //
575 // tuple_tmp = (a, b, c)
576 // Fn::call(closure_ref, tuple_tmp)
577 //
578 // meanwhile the closure body expects the arguments (here, `a`, `b`, and `c`)
579 // as distinct arguments. (This is the "rust-call" ABI hack.) Normally, trans has
580 // the job of unpacking this tuple. But here, we are trans. =) So we want to create
581 // a vector like
582 //
583 // [closure_ref, tuple_tmp.0, tuple_tmp.1, tuple_tmp.2]
584 //
585 // Except for one tiny wrinkle: we don't actually want `tuple_tmp.0`. It's more convenient
586 // if we "spill" that into *another* temporary, so that we can map the argument
587 // variable in the callee MIR directly to an argument variable on our side.
588 // So we introduce temporaries like:
589 //
590 // tmp0 = tuple_tmp.0
591 // tmp1 = tuple_tmp.1
592 // tmp2 = tuple_tmp.2
593 //
594 // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
595 if tcx.is_closure(callsite.callee) {
596 let mut args = args.into_iter();
597 let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_mir);
598 let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_mir);
599 assert!(args.next().is_none());
600
601 let tuple = Place::Local(tuple);
602 let tuple_tys = if let ty::TyTuple(s) = tuple.ty(caller_mir, tcx).to_ty(tcx).sty {
603 s
604 } else {
605 bug!("Closure arguments are not passed as a tuple");
606 };
607
608 // The `closure_ref` in our example above.
609 let closure_ref_arg = iter::once(self_);
610
611 // The `tmp0`, `tmp1`, and `tmp2` in our example above.
612 let tuple_tmp_args =
613 tuple_tys.iter().enumerate().map(|(i, ty)| {
614 // This is e.g. `tuple_tmp.0` in our example above.
615 let tuple_field = Operand::Move(tuple.clone().field(Field::new(i), ty));
616
617 // Spill to a local to make e.g. `tmp0`.
618 self.create_temp_if_necessary(tuple_field, callsite, caller_mir)
619 });
620
621 closure_ref_arg.chain(tuple_tmp_args).collect()
622 } else {
623 args.into_iter()
624 .map(|a| self.create_temp_if_necessary(a, callsite, caller_mir))
625 .collect()
626 }
627 }
628
629 /// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh
630 /// temporary `T` and an instruction `T = arg`, and returns `T`.
631 fn create_temp_if_necessary(
632 &self,
633 arg: Operand<'tcx>,
634 callsite: &CallSite<'tcx>,
635 caller_mir: &mut Mir<'tcx>,
636 ) -> Local {
637 // FIXME: Analysis of the usage of the arguments to avoid
638 // unnecessary temporaries.
639
640 if let Operand::Move(Place::Local(local)) = arg {
641 if caller_mir.local_kind(local) == LocalKind::Temp {
642 // Reuse the operand if it's a temporary already
643 return local;
644 }
645 }
646
647 debug!("Creating temp for argument {:?}", arg);
648 // Otherwise, create a temporary for the arg
649 let arg = Rvalue::Use(arg);
650
651 let ty = arg.ty(caller_mir, self.tcx);
652
653 let arg_tmp = LocalDecl::new_temp(ty, callsite.location.span);
654 let arg_tmp = caller_mir.local_decls.push(arg_tmp);
655
656 let stmt = Statement {
657 source_info: callsite.location,
658 kind: StatementKind::Assign(Place::Local(arg_tmp), arg),
659 };
660 caller_mir[callsite.bb].statements.push(stmt);
661 arg_tmp
662 }
663 }
664
665 fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
666 param_env: ty::ParamEnv<'tcx>,
667 ty: Ty<'tcx>) -> Option<u64> {
668 tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes())
669 }
670
671 /**
672 * Integrator.
673 *
674 * Integrates blocks from the callee function into the calling function.
675 * Updates block indices, references to locals, and other control-flow
676 * details.
677 */
678 struct Integrator<'a, 'tcx: 'a> {
679 block_idx: usize,
680 args: &'a [Local],
681 local_map: IndexVec<Local, Local>,
682 scope_map: IndexVec<VisibilityScope, VisibilityScope>,
683 promoted_map: IndexVec<Promoted, Promoted>,
684 _callsite: CallSite<'tcx>,
685 destination: Place<'tcx>,
686 return_block: BasicBlock,
687 cleanup_block: Option<BasicBlock>,
688 in_cleanup_block: bool,
689 }
690
691 impl<'a, 'tcx> Integrator<'a, 'tcx> {
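    /// Callee blocks are appended to the caller starting at index `block_idx`, so
    /// every branch target inside the inlined body is shifted by that offset.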
692 fn update_target(&self, tgt: BasicBlock) -> BasicBlock {
693 let new = BasicBlock::new(tgt.index() + self.block_idx);
694 debug!("Updating target `{:?}`, new: `{:?}`", tgt, new);
695 new
696 }
697 }
698
699 impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
700 fn visit_local(&mut self,
701 local: &mut Local,
702 _ctxt: PlaceContext<'tcx>,
703 _location: Location) {
704 if *local == RETURN_PLACE {
705 match self.destination {
706 Place::Local(l) => {
707 *local = l;
708 return;
709 },
710 ref place => bug!("Return place is {:?}, not local", place)
711 }
712 }
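// Local 0 is the return place (handled above); locals 1..=arg_count are the
// callee's arguments, which map to the caller temporaries in `self.args`; all
// remaining locals map through `local_map`.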
713 let idx = local.index() - 1;
714 if idx < self.args.len() {
715 *local = self.args[idx];
716 return;
717 }
718 *local = self.local_map[Local::new(idx - self.args.len())];
719 }
720
721 fn visit_place(&mut self,
722 place: &mut Place<'tcx>,
723 _ctxt: PlaceContext<'tcx>,
724 _location: Location) {
725 if let Place::Local(RETURN_PLACE) = *place {
726 // Return pointer; update the place itself
727 *place = self.destination.clone();
728 } else {
729 self.super_place(place, _ctxt, _location);
730 }
731 }
732
733 fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
734 self.in_cleanup_block = data.is_cleanup;
735 self.super_basic_block_data(block, data);
736 self.in_cleanup_block = false;
737 }
738
739 fn visit_terminator_kind(&mut self, block: BasicBlock,
740 kind: &mut TerminatorKind<'tcx>, loc: Location) {
741 self.super_terminator_kind(block, kind, loc);
742
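// Redirect every branch target into the appended copy of the callee. Calls,
// drops, and asserts that have no unwind edge of their own inherit the original
// call's cleanup block, unless they already sit inside a cleanup block.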
743 match *kind {
744 TerminatorKind::GeneratorDrop |
745 TerminatorKind::Yield { .. } => bug!(),
746 TerminatorKind::Goto { ref mut target} => {
747 *target = self.update_target(*target);
748 }
749 TerminatorKind::SwitchInt { ref mut targets, .. } => {
750 for tgt in targets {
751 *tgt = self.update_target(*tgt);
752 }
753 }
754 TerminatorKind::Drop { ref mut target, ref mut unwind, .. } |
755 TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
756 *target = self.update_target(*target);
757 if let Some(tgt) = *unwind {
758 *unwind = Some(self.update_target(tgt));
759 } else if !self.in_cleanup_block {
760 // Unless this drop is in a cleanup block, add an unwind edge to
761 // the original call's cleanup block
762 *unwind = self.cleanup_block;
763 }
764 }
765 TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
766 if let Some((_, ref mut tgt)) = *destination {
767 *tgt = self.update_target(*tgt);
768 }
769 if let Some(tgt) = *cleanup {
770 *cleanup = Some(self.update_target(tgt));
771 } else if !self.in_cleanup_block {
772 // Unless this call is in a cleanup block, add an unwind edge to
773 // the original call's cleanup block
774 *cleanup = self.cleanup_block;
775 }
776 }
777 TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
778 *target = self.update_target(*target);
779 if let Some(tgt) = *cleanup {
780 *cleanup = Some(self.update_target(tgt));
781 } else if !self.in_cleanup_block {
782 // Unless this assert is in a cleanup block, add an unwind edge to
783 // the original call's cleanup block
784 *cleanup = self.cleanup_block;
785 }
786 }
787 TerminatorKind::Return => {
788 *kind = TerminatorKind::Goto { target: self.return_block };
789 }
790 TerminatorKind::Resume => {
791 if let Some(tgt) = self.cleanup_block {
792 *kind = TerminatorKind::Goto { target: tgt }
793 }
794 }
795 TerminatorKind::Abort => { }
796 TerminatorKind::Unreachable => { }
797 TerminatorKind::FalseEdges { ref mut real_target, ref mut imaginary_targets } => {
798 *real_target = self.update_target(*real_target);
799 for target in imaginary_targets {
800 *target = self.update_target(*target);
801 }
802 }
803 TerminatorKind::FalseUnwind { real_target: _ , unwind: _ } =>
804 // see the ordering of passes in the optimized_mir query.
805 bug!("False unwinds should have been removed before inlining")
806 }
807 }
808
809 fn visit_visibility_scope(&mut self, scope: &mut VisibilityScope) {
810 *scope = self.scope_map[*scope];
811 }
812
813 fn visit_literal(&mut self, literal: &mut Literal<'tcx>, loc: Location) {
814 if let Literal::Promoted { ref mut index } = *literal {
815 if let Some(p) = self.promoted_map.get(*index).cloned() {
816 *index = p;
817 }
818 } else {
819 self.super_literal(literal, loc);
820 }
821 }
822 }