// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Inlining pass for MIR functions
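//!
//! Replaces suitable `Call` terminators with a copy of the callee's
//! (substituted) MIR, driven by a simple cost model: per-statement and
//! per-call penalties are summed and compared against a threshold that
//! is raised for `#[inline]`-hinted functions and lowered for
//! `#[cold]` ones.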

use rustc::hir::def_id::DefId;

use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};

use rustc::mir::*;
use rustc::mir::transform::{MirPass, MirSource};
use rustc::mir::visit::*;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::subst::{Subst, Substs};

use std::collections::VecDeque;

use super::simplify::{remove_dead_blocks, CfgSimplifier};

use syntax::attr;
use syntax::abi::Abi;

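// Tunable parameters for the cost model in `should_inline`: the two
// thresholds bound the total estimated cost of a callee (the higher one
// applies to `#[inline]`-hinted functions), while the remaining
// constants are the per-statement cost, the extra penalty for calls and
// asserts, and the cost charged for locals whose size is unknown.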
const DEFAULT_THRESHOLD: usize = 50;
const HINT_THRESHOLD: usize = 100;

const INSTR_COST: usize = 5;
const CALL_PENALTY: usize = 25;

const UNKNOWN_SIZE_COST: usize = 10;

pub struct Inline;

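/// A direct `Call` in the caller's MIR that is a candidate for
/// inlining, along with the substitutions the callee is invoked with.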
#[derive(Copy, Clone)]
struct CallSite<'tcx> {
    callee: DefId,
    substs: &'tcx Substs<'tcx>,
    bb: BasicBlock,
    location: SourceInfo,
}

impl MirPass for Inline {
    fn run_pass<'a, 'tcx>(&self,
                          tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          source: MirSource,
                          mir: &mut Mir<'tcx>) {
        if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 {
            Inliner { tcx, source }.run_pass(mir);
        }
    }
}

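/// Worker for the `Inline` pass: `run_pass` drives inlining to a fixed
/// point on a single caller body.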
struct Inliner<'a, 'tcx: 'a> {
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    source: MirSource,
}

impl<'a, 'tcx> Inliner<'a, 'tcx> {
    fn run_pass(&self, caller_mir: &mut Mir<'tcx>) {
        // Keep a queue of callsites to try inlining on. We take
        // advantage of the fact that queries detect cycles here to
        // allow us to try and fetch the fully optimized MIR of a
        // call; if it succeeds, we can inline it and we know that
        // it does not call us. Otherwise, we just don't try to
        // inline.
        //
        // We use a queue so that we inline "broadly" before we inline
        // in depth. It is unclear if this is the best heuristic,
        // really, but that's true of all the heuristics in this
        // file. =)

        let mut callsites = VecDeque::new();

        // Only do inlining into fn bodies.
        if let MirSource::Fn(_) = self.source {
            for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated() {
                // Don't inline calls that are in cleanup blocks.
                if bb_data.is_cleanup { continue; }

                // Only consider direct calls to functions.
                let terminator = bb_data.terminator();
                if let TerminatorKind::Call {
                    func: Operand::Constant(ref f), .. } = terminator.kind {
                    if let ty::TyFnDef(callee_def_id, substs) = f.ty.sty {
                        callsites.push_back(CallSite {
                            callee: callee_def_id,
                            substs,
                            bb,
                            location: terminator.source_info
                        });
                    }
                }
            }
        }

        let mut local_change;
        let mut changed = false;

        loop {
            local_change = false;
            while let Some(callsite) = callsites.pop_front() {
                if !self.tcx.is_mir_available(callsite.callee) {
                    continue;
                }

                let callee_mir = match ty::queries::optimized_mir::try_get(self.tcx,
                                                                           callsite.location.span,
                                                                           callsite.callee) {
                    Ok(ref callee_mir) if self.should_inline(callsite, callee_mir) => {
                        callee_mir.subst(self.tcx, callsite.substs)
                    }
                    Ok(_) => continue,

                    Err(mut bug) => {
                        // FIXME(#43542) shouldn't have to cancel an error
                        bug.cancel();
                        continue
                    }
                };

                let start = caller_mir.basic_blocks().len();

                if !self.inline_call(callsite, caller_mir, callee_mir) {
                    continue;
                }

                // Add callsites from the inlined function.
                for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated().skip(start) {
                    // Only consider direct calls to functions.
                    let terminator = bb_data.terminator();
                    if let TerminatorKind::Call {
                        func: Operand::Constant(ref f), .. } = terminator.kind {
                        if let ty::TyFnDef(callee_def_id, substs) = f.ty.sty {
                            // Don't inline the same function multiple times;
                            // this avoids endlessly inlining recursive calls
                            // to the callee we just inlined.
                            if callsite.callee != callee_def_id {
                                callsites.push_back(CallSite {
                                    callee: callee_def_id,
                                    substs,
                                    bb,
                                    location: terminator.source_info
                                });
                            }
                        }
                    }
                }

                local_change = true;
                changed = true;
            }

            if !local_change {
                break;
            }
        }

        // Simplify if we inlined anything.
        if changed {
            debug!("Running simplify cfg on {:?}", self.source);
            CfgSimplifier::new(caller_mir).simplify();
            remove_dead_blocks(caller_mir);
        }
    }

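    /// Estimates the cost of inlining `callee_mir` at `callsite` and
    /// compares it against a threshold derived from the callee's
    /// `#[inline]`/`#[cold]` attributes and shape. `#[inline(always)]`
    /// callees are accepted regardless of cost, but the earlier checks
    /// (captures, `#[inline(never)]`) can still reject them.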
    fn should_inline(&self,
                     callsite: CallSite<'tcx>,
                     callee_mir: &Mir<'tcx>)
                     -> bool
    {
        let tcx = self.tcx;

        // Don't inline closures that have captures.
        // FIXME: Handle closures better
        if callee_mir.upvar_decls.len() > 0 {
            return false;
        }

        let attrs = tcx.get_attrs(callsite.callee);
        let hint = attr::find_inline_attr(None, &attrs[..]);

        let hinted = match hint {
            // Just treat inline(always) as a hint for now,
            // there are cases that prevent inlining that we
            // need to check for first.
            attr::InlineAttr::Always => true,
            attr::InlineAttr::Never => return false,
            attr::InlineAttr::Hint => true,
            attr::InlineAttr::None => false,
        };

        // Only inline local functions if they would be eligible for
        // cross-crate inlining. This is to ensure that the final crate
        // doesn't have MIR that references unexported symbols.
        if callsite.callee.is_local() {
            if callsite.substs.types().count() == 0 && !hinted {
                return false;
            }
        }

        let mut threshold = if hinted {
            HINT_THRESHOLD
        } else {
            DEFAULT_THRESHOLD
        };

        // Significantly lower the threshold for inlining cold functions.
        if attr::contains_name(&attrs[..], "cold") {
            threshold /= 5;
        }

        // Give a bonus to functions with a small number of blocks;
        // we normally have two or three blocks for even very small
        // functions.
        if callee_mir.basic_blocks().len() <= 3 {
            threshold += threshold / 4;
        }

        // FIXME: Give a bonus to functions with only a single caller

        let def_id = tcx.hir.local_def_id(self.source.item_id());
        let param_env = tcx.param_env(def_id);

        let mut first_block = true;
        let mut cost = 0;

        // Traverse the MIR manually so we can account for the effects of
        // inlining on the CFG.
        let mut work_list = vec![START_BLOCK];
        let mut visited = BitVector::new(callee_mir.basic_blocks().len());
        while let Some(bb) = work_list.pop() {
            if !visited.insert(bb.index()) { continue; }
            let blk = &callee_mir.basic_blocks()[bb];

            for stmt in &blk.statements {
                // Don't count StorageLive/StorageDead in the inlining cost.
                match stmt.kind {
                    StatementKind::StorageLive(_) |
                    StatementKind::StorageDead(_) |
                    StatementKind::Nop => {}
                    _ => cost += INSTR_COST
                }
            }
            let term = blk.terminator();
            let mut is_drop = false;
            match term.kind {
                TerminatorKind::Drop { ref location, target, unwind } |
                TerminatorKind::DropAndReplace { ref location, target, unwind, .. } => {
                    is_drop = true;
                    work_list.push(target);
                    // If the location doesn't actually need dropping, treat it like
                    // a regular goto.
                    let ty = location.ty(callee_mir, tcx).subst(tcx, callsite.substs);
                    let ty = ty.to_ty(tcx);
                    if ty.needs_drop(tcx, param_env) {
                        cost += CALL_PENALTY;
                        if let Some(unwind) = unwind {
                            work_list.push(unwind);
                        }
                    } else {
                        cost += INSTR_COST;
                    }
                }

                TerminatorKind::Unreachable |
                TerminatorKind::Call { destination: None, .. } if first_block => {
                    // If the function always diverges, don't inline
                    // unless the cost is zero.
                    threshold = 0;
                }

                TerminatorKind::Call { func: Operand::Constant(ref f), .. } => {
                    if let ty::TyFnDef(def_id, _) = f.ty.sty {
                        // Don't give intrinsics the extra penalty for calls.
                        let f = tcx.fn_sig(def_id);
                        if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
                            cost += INSTR_COST;
                        } else {
                            cost += CALL_PENALTY;
                        }
                    }
                }
                TerminatorKind::Assert { .. } => cost += CALL_PENALTY,
                _ => cost += INSTR_COST
            }

            if !is_drop {
                for &succ in &term.successors()[..] {
                    work_list.push(succ);
                }
            }

            first_block = false;
        }

        // Count up the cost of local variables and temps: if we know the
        // size, use that; otherwise use a moderately-large dummy cost.

        let ptr_size = tcx.data_layout.pointer_size.bytes();

        for v in callee_mir.vars_and_temps_iter() {
            let v = &callee_mir.local_decls[v];
            let ty = v.ty.subst(tcx, callsite.substs);
            // Cost of the var is the size in machine-words, if we know
            // it.
            if let Some(size) = type_size_of(tcx, param_env.clone(), ty) {
                cost += (size / ptr_size) as usize;
            } else {
                cost += UNKNOWN_SIZE_COST;
            }
        }

        debug!("Inline cost for {:?} is {}", callsite.callee, cost);

        if let attr::InlineAttr::Always = hint {
            true
        } else {
            cost <= threshold
        }
    }

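    /// Splices `callee_mir`'s blocks into `caller_mir` (rewritten by the
    /// `Integrator` below), then replaces the original `Call` terminator
    /// with a `Goto` into the first inlined block. Returns `false`, with
    /// the terminator restored, if the call is of a form we don't inline
    /// (e.g. a diverging call with no destination).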
    fn inline_call(&self,
                   callsite: CallSite<'tcx>,
                   caller_mir: &mut Mir<'tcx>,
                   mut callee_mir: Mir<'tcx>) -> bool {
        let terminator = caller_mir[callsite.bb].terminator.take().unwrap();
        match terminator.kind {
            // FIXME: Handle inlining of diverging calls
            TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
                debug!("Inlined {:?} into {:?}", callsite.callee, self.source);

                let is_box_free = Some(callsite.callee) == self.tcx.lang_items.box_free_fn();

                let mut local_map = IndexVec::with_capacity(callee_mir.local_decls.len());
                let mut scope_map = IndexVec::with_capacity(callee_mir.visibility_scopes.len());
                let mut promoted_map = IndexVec::with_capacity(callee_mir.promoted.len());

                for mut scope in callee_mir.visibility_scopes.iter().cloned() {
                    if scope.parent_scope.is_none() {
                        scope.parent_scope = Some(callsite.location.scope);
                        scope.span = callee_mir.span;
                    }

                    scope.span = callsite.location.span;

                    let idx = caller_mir.visibility_scopes.push(scope);
                    scope_map.push(idx);
                }

                for loc in callee_mir.vars_and_temps_iter() {
                    let mut local = callee_mir.local_decls[loc].clone();

                    local.source_info.scope = scope_map[local.source_info.scope];
                    local.source_info.span = callsite.location.span;

                    let idx = caller_mir.local_decls.push(local);
                    local_map.push(idx);
                }

                for p in callee_mir.promoted.iter().cloned() {
                    let idx = caller_mir.promoted.push(p);
                    promoted_map.push(idx);
                }

                // If the call is something like `a[*i] = f(i)`, where
                // `i : &mut usize`, then just duplicating the `a[*i]`
                // Lvalue could result in two different locations if `f`
                // writes to `i`. To prevent this we need to create a temporary
                // borrow of the lvalue and pass the destination as `*temp` instead.
                fn dest_needs_borrow(lval: &Lvalue) -> bool {
                    match *lval {
                        Lvalue::Projection(ref p) => {
                            match p.elem {
                                ProjectionElem::Deref |
                                ProjectionElem::Index(_) => true,
                                _ => dest_needs_borrow(&p.base)
                            }
                        }
                        // Static variables need a borrow because the callee
                        // might modify the same static.
                        Lvalue::Static(_) => true,
                        _ => false
                    }
                }

                let dest = if dest_needs_borrow(&destination.0) {
                    debug!("Creating temp for return destination");
                    let dest = Rvalue::Ref(
                        self.tcx.types.re_erased,
                        BorrowKind::Mut,
                        destination.0);

                    let ty = dest.ty(caller_mir, self.tcx);

                    let temp = LocalDecl::new_temp(ty, callsite.location.span);

                    let tmp = caller_mir.local_decls.push(temp);
                    let tmp = Lvalue::Local(tmp);

                    let stmt = Statement {
                        source_info: callsite.location,
                        kind: StatementKind::Assign(tmp.clone(), dest)
                    };
                    caller_mir[callsite.bb]
                        .statements.push(stmt);
                    tmp.deref()
                } else {
                    destination.0
                };

                let return_block = destination.1;

                let args: Vec<_> = if is_box_free {
                    assert!(args.len() == 1);
                    // box_free takes a Box, but is defined with a *mut T;
                    // inlining needs to generate the cast.
                    // FIXME: we should probably just generate correct MIR in the first place...

                    let arg = if let Operand::Consume(ref lval) = args[0] {
                        lval.clone()
                    } else {
                        bug!("Constant arg to \"box_free\"");
                    };

                    let ptr_ty = args[0].ty(caller_mir, self.tcx);
                    vec![self.cast_box_free_arg(arg, ptr_ty, &callsite, caller_mir)]
                } else {
                    // Copy the arguments if needed.
                    self.make_call_args(args, &callsite, caller_mir)
                };

                let bb_len = caller_mir.basic_blocks().len();
                let mut integrator = Integrator {
                    block_idx: bb_len,
                    args: &args,
                    local_map,
                    scope_map,
                    promoted_map,
                    _callsite: callsite,
                    destination: dest,
                    return_block,
                    cleanup_block: cleanup,
                    in_cleanup_block: false
                };

                for (bb, mut block) in callee_mir.basic_blocks_mut().drain_enumerated(..) {
                    integrator.visit_basic_block_data(bb, &mut block);
                    caller_mir.basic_blocks_mut().push(block);
                }

                let terminator = Terminator {
                    source_info: callsite.location,
                    kind: TerminatorKind::Goto { target: BasicBlock::new(bb_len) }
                };

                caller_mir[callsite.bb].terminator = Some(terminator);

                true
            }
            kind => {
                caller_mir[callsite.bb].terminator = Some(Terminator {
                    source_info: terminator.source_info,
                    kind,
                });
                false
            }
        }
    }

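    /// Builds the argument for an inlined `box_free` call. `box_free` is
    /// defined to take a `*mut T`, but the callsite passes a `Box`-like
    /// pointer, so we emit roughly the following MIR (a sketch, not the
    /// exact textual form):
    ///
    /// ```text
    /// ref_tmp = &mut (*arg);          // reborrow the pointee
    /// cast_tmp = ref_tmp as *mut T;   // Misc cast to a raw pointer
    /// ```
    ///
    /// and pass `cast_tmp` as the sole argument.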
    fn cast_box_free_arg(&self, arg: Lvalue<'tcx>, ptr_ty: Ty<'tcx>,
                         callsite: &CallSite<'tcx>, caller_mir: &mut Mir<'tcx>) -> Operand<'tcx> {
        let arg = Rvalue::Ref(
            self.tcx.types.re_erased,
            BorrowKind::Mut,
            arg.deref());

        let ty = arg.ty(caller_mir, self.tcx);
        let ref_tmp = LocalDecl::new_temp(ty, callsite.location.span);
        let ref_tmp = caller_mir.local_decls.push(ref_tmp);
        let ref_tmp = Lvalue::Local(ref_tmp);

        let ref_stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(ref_tmp.clone(), arg)
        };

        caller_mir[callsite.bb]
            .statements.push(ref_stmt);

        let pointee_ty = match ptr_ty.sty {
            ty::TyRawPtr(tm) | ty::TyRef(_, tm) => tm.ty,
            _ if ptr_ty.is_box() => ptr_ty.boxed_ty(),
            _ => bug!("Invalid type `{:?}` for call to box_free", ptr_ty)
        };
        let ptr_ty = self.tcx.mk_mut_ptr(pointee_ty);

        let raw_ptr = Rvalue::Cast(CastKind::Misc, Operand::Consume(ref_tmp), ptr_ty);

        let cast_tmp = LocalDecl::new_temp(ptr_ty, callsite.location.span);
        let cast_tmp = caller_mir.local_decls.push(cast_tmp);
        let cast_tmp = Lvalue::Local(cast_tmp);

        let cast_stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(cast_tmp.clone(), raw_ptr)
        };

        caller_mir[callsite.bb]
            .statements.push(cast_stmt);

        Operand::Consume(cast_tmp)
    }

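    /// Prepares the call's arguments for substitution into the callee
    /// body: an operand that is already a caller temporary is reused
    /// as-is; anything else is first copied into a fresh temporary so
    /// the inlined body reads from a dedicated local.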
    fn make_call_args(&self, args: Vec<Operand<'tcx>>,
                      callsite: &CallSite<'tcx>, caller_mir: &mut Mir<'tcx>) -> Vec<Operand<'tcx>> {
        let tcx = self.tcx;
        // FIXME: Analysis of the usage of the arguments to avoid
        // unnecessary temporaries.
        args.into_iter().map(|a| {
            if let Operand::Consume(Lvalue::Local(local)) = a {
                if caller_mir.local_kind(local) == LocalKind::Temp {
                    // Reuse the operand if it's already a temporary.
                    return a;
                }
            }

            debug!("Creating temp for argument");
            // Otherwise, create a temporary for the arg.
            let arg = Rvalue::Use(a);

            let ty = arg.ty(caller_mir, tcx);

            let arg_tmp = LocalDecl::new_temp(ty, callsite.location.span);
            let arg_tmp = caller_mir.local_decls.push(arg_tmp);
            let arg_tmp = Lvalue::Local(arg_tmp);

            let stmt = Statement {
                source_info: callsite.location,
                kind: StatementKind::Assign(arg_tmp.clone(), arg)
            };
            caller_mir[callsite.bb].statements.push(stmt);
            Operand::Consume(arg_tmp)
        }).collect()
    }
}

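/// The size of `ty` in bytes, if its layout can be computed in
/// `param_env` (it may not be, e.g. for types that still mention
/// unsubstituted type parameters).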
fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          param_env: ty::ParamEnv<'tcx>,
                          ty: Ty<'tcx>) -> Option<u64> {
    ty.layout(tcx, param_env).ok().map(|layout| {
        layout.size(&tcx.data_layout).bytes()
    })
}

/**
 * Integrator.
 *
 * Integrates blocks from the callee function into the calling function.
 * Updates block indices, references to locals, visibility scopes, and
 * control-flow edges (return, unwind).
 */
struct Integrator<'a, 'tcx: 'a> {
    block_idx: usize,
    args: &'a [Operand<'tcx>],
    local_map: IndexVec<Local, Local>,
    scope_map: IndexVec<VisibilityScope, VisibilityScope>,
    promoted_map: IndexVec<Promoted, Promoted>,
    _callsite: CallSite<'tcx>,
    destination: Lvalue<'tcx>,
    return_block: BasicBlock,
    cleanup_block: Option<BasicBlock>,
    in_cleanup_block: bool,
}

impl<'a, 'tcx> Integrator<'a, 'tcx> {
    fn update_target(&self, tgt: BasicBlock) -> BasicBlock {
        let new = BasicBlock::new(tgt.index() + self.block_idx);
        debug!("Updating target `{:?}`, new: `{:?}`", tgt, new);
        new
    }

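    // MIR numbers locals with the return pointer first, then the
    // arguments, then vars and temps; the helpers below use that layout
    // to tell which of the three groups a callee local falls into.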
    fn update_local(&self, local: Local) -> Option<Local> {
        let idx = local.index();
        if idx < (self.args.len() + 1) {
            return None;
        }
        let idx = idx - (self.args.len() + 1);
        let local = Local::new(idx);
        self.local_map.get(local).cloned()
    }

    fn arg_index(&self, arg: Local) -> Option<usize> {
        let idx = arg.index();
        if idx > 0 && idx <= self.args.len() {
            Some(idx - 1)
        } else {
            None
        }
    }
}

impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
    fn visit_lvalue(&mut self,
                    lvalue: &mut Lvalue<'tcx>,
                    _ctxt: LvalueContext<'tcx>,
                    _location: Location) {
        if let Lvalue::Local(ref mut local) = *lvalue {
            if let Some(l) = self.update_local(*local) {
                // Temp or Var; update the local reference.
                *local = l;
                return;
            }
        }
        if let Lvalue::Local(local) = *lvalue {
            if local == RETURN_POINTER {
                // Return pointer; update the lvalue itself.
                *lvalue = self.destination.clone();
            } else if local.index() < (self.args.len() + 1) {
                // Argument; once again, update the lvalue itself.
                let idx = local.index() - 1;
                if let Operand::Consume(ref lval) = self.args[idx] {
                    *lvalue = lval.clone();
                } else {
                    bug!("Arg operand `{:?}` is not an Lvalue use.", idx)
                }
            }
        } else {
            self.super_lvalue(lvalue, _ctxt, _location)
        }
    }

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        if let Operand::Consume(Lvalue::Local(arg)) = *operand {
            if let Some(idx) = self.arg_index(arg) {
                let new_arg = self.args[idx].clone();
                *operand = new_arg;
                return;
            }
        }
        self.super_operand(operand, location);
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        self.in_cleanup_block = data.is_cleanup;
        self.super_basic_block_data(block, data);
        self.in_cleanup_block = false;
    }

    fn visit_terminator_kind(&mut self, block: BasicBlock,
                             kind: &mut TerminatorKind<'tcx>, loc: Location) {
        self.super_terminator_kind(block, kind, loc);

        match *kind {
            TerminatorKind::Goto { ref mut target } => {
                *target = self.update_target(*target);
            }
            TerminatorKind::SwitchInt { ref mut targets, .. } => {
                for tgt in targets {
                    *tgt = self.update_target(*tgt);
                }
            }
            TerminatorKind::Drop { ref mut target, ref mut unwind, .. } |
            TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *unwind {
                    *unwind = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this drop is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block.
                    *unwind = self.cleanup_block;
                }
            }
            TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
                if let Some((_, ref mut tgt)) = *destination {
                    *tgt = self.update_target(*tgt);
                }
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this call is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block.
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this assert is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block.
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Return => {
                // The callee's return becomes a jump back to the block
                // the original call continued in.
                *kind = TerminatorKind::Goto { target: self.return_block };
            }
            TerminatorKind::Resume => {
                if let Some(tgt) = self.cleanup_block {
                    *kind = TerminatorKind::Goto { target: tgt }
                }
            }
            TerminatorKind::Unreachable => { }
        }
    }

    fn visit_visibility_scope(&mut self, scope: &mut VisibilityScope) {
        *scope = self.scope_map[*scope];
    }

    fn visit_literal(&mut self, literal: &mut Literal<'tcx>, loc: Location) {
        if let Literal::Promoted { ref mut index } = *literal {
            if let Some(p) = self.promoted_map.get(*index).cloned() {
                *index = p;
            }
        } else {
            self.super_literal(literal, loc);
        }
    }
}