// compiler/rustc_mir/src/transform/inline.rs (rustc 1.48.0)
//! Inlining pass for MIR functions

use rustc_attr as attr;
use rustc_hir::def_id::DefId;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::subst::{Subst, SubstsRef};
use rustc_middle::ty::{self, ConstKind, Instance, InstanceDef, ParamEnv, Ty, TyCtxt};
use rustc_target::spec::abi::Abi;

use super::simplify::{remove_dead_blocks, CfgSimplifier};
use crate::transform::{MirPass, MirSource};
use std::collections::VecDeque;
use std::iter;

const DEFAULT_THRESHOLD: usize = 50;
const HINT_THRESHOLD: usize = 100;

const INSTR_COST: usize = 5;
const CALL_PENALTY: usize = 25;
const LANDINGPAD_PENALTY: usize = 50;
const RESUME_PENALTY: usize = 45;

const UNKNOWN_SIZE_COST: usize = 10;

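/// The MIR inlining pass: splices the optimized MIR of suitable callees into the caller at call sites.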
pub struct Inline;

#[derive(Copy, Clone, Debug)]
struct CallSite<'tcx> {
    callee: DefId,
    substs: SubstsRef<'tcx>,
    bb: BasicBlock,
    location: SourceInfo,
}

impl<'tcx> MirPass<'tcx> for Inline {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, source: MirSource<'tcx>, body: &mut Body<'tcx>) {
        if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 {
            if tcx.sess.opts.debugging_opts.instrument_coverage {
                // The current implementation of source code coverage injects code region counters
                // into the MIR, and assumes a 1-to-1 correspondence between MIR and
                // source-code-based functions.
                debug!("function inlining is disabled when compiling with `instrument_coverage`");
            } else {
                Inliner { tcx, source, codegen_fn_attrs: tcx.codegen_fn_attrs(source.def_id()) }
                    .run_pass(body);
            }
        }
    }
}

struct Inliner<'tcx> {
    tcx: TyCtxt<'tcx>,
    source: MirSource<'tcx>,
    codegen_fn_attrs: &'tcx CodegenFnAttrs,
}

impl Inliner<'tcx> {
    fn run_pass(&self, caller_body: &mut Body<'tcx>) {
        // Keep a queue of callsites to try inlining on. We take
        // advantage of the fact that queries detect cycles here to
        // allow us to try and fetch the fully optimized MIR of a
        // call; if it succeeds, we can inline it and we know that
        // they do not call us. Otherwise, we just don't try to
        // inline.
        //
        // We use a queue so that we inline "broadly" before we inline
        // in depth. It is unclear if this is the best heuristic,
        // really, but that's true of all the heuristics in this
        // file. =)

        let mut callsites = VecDeque::new();

        let param_env = self.tcx.param_env_reveal_all_normalized(self.source.def_id());

        // Only do inlining into fn bodies.
        let id = self.tcx.hir().local_def_id_to_hir_id(self.source.def_id().expect_local());
        if self.tcx.hir().body_owner_kind(id).is_fn_or_closure() && self.source.promoted.is_none() {
            for (bb, bb_data) in caller_body.basic_blocks().iter_enumerated() {
                if let Some(callsite) =
                    self.get_valid_function_call(bb, bb_data, caller_body, param_env)
                {
                    callsites.push_back(callsite);
                }
            }
        } else {
            return;
        }

        let mut local_change;
        let mut changed = false;

        loop {
            local_change = false;
            while let Some(callsite) = callsites.pop_front() {
                debug!("checking whether to inline callsite {:?}", callsite);
                if !self.tcx.is_mir_available(callsite.callee) {
                    debug!("checking whether to inline callsite {:?} - MIR unavailable", callsite);
                    continue;
                }

                let callee_body = if let Some(callee_def_id) = callsite.callee.as_local() {
                    let callee_hir_id = self.tcx.hir().local_def_id_to_hir_id(callee_def_id);
                    let self_hir_id =
                        self.tcx.hir().local_def_id_to_hir_id(self.source.def_id().expect_local());
                    // Avoid a cycle here by using `optimized_mir` only if we have
                    // a lower `HirId` than the callee. This ensures that the callee will
                    // not inline us. This trick only works without incremental compilation.
                    // So don't do it if that is enabled. Also avoid inlining into generators,
                    // since their `optimized_mir` is used for layout computation, which can
                    // create a cycle, even when no attempt is made to inline the function
                    // in the other direction.
                    if !self.tcx.dep_graph.is_fully_enabled()
                        && self_hir_id < callee_hir_id
                        && caller_body.generator_kind.is_none()
                    {
                        self.tcx.optimized_mir(callsite.callee)
                    } else {
                        continue;
                    }
                } else {
                    // This cannot result in a cycle since the callee MIR is from another crate
                    // and is already optimized.
                    self.tcx.optimized_mir(callsite.callee)
                };

                let callee_body = if self.consider_optimizing(callsite, callee_body) {
                    self.tcx.subst_and_normalize_erasing_regions(
                        &callsite.substs,
                        param_env,
                        callee_body,
                    )
                } else {
                    continue;
                };

                // Copy only unevaluated constants from the callee_body into the caller_body.
                // Although we are only pushing `ConstKind::Unevaluated` consts to
                // `required_consts`, here we may not only have `ConstKind::Unevaluated`
                // because we are calling `subst_and_normalize_erasing_regions`.
                caller_body.required_consts.extend(
                    callee_body.required_consts.iter().copied().filter(|&constant| {
                        matches!(constant.literal.val, ConstKind::Unevaluated(_, _, _))
                    }),
                );

                let start = caller_body.basic_blocks().len();
                debug!("attempting to inline callsite {:?} - body={:?}", callsite, callee_body);
                if !self.inline_call(callsite, caller_body, callee_body) {
                    debug!("attempting to inline callsite {:?} - failure", callsite);
                    continue;
                }
                debug!("attempting to inline callsite {:?} - success", callsite);

                // Add callsites from inlined function
                for (bb, bb_data) in caller_body.basic_blocks().iter_enumerated().skip(start) {
                    if let Some(new_callsite) =
                        self.get_valid_function_call(bb, bb_data, caller_body, param_env)
                    {
                        // Don't inline the same function multiple times.
                        if callsite.callee != new_callsite.callee {
                            callsites.push_back(new_callsite);
                        }
                    }
                }

                local_change = true;
                changed = true;
            }

            if !local_change {
                break;
            }
        }

        // Simplify if we inlined anything.
        if changed {
            debug!("running simplify cfg on {:?}", self.source);
            CfgSimplifier::new(caller_body).simplify();
            remove_dead_blocks(caller_body);
        }
    }

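    /// Returns a `CallSite` for the terminator of `bb` if it is a direct, resolvable,
    /// non-virtual call that is not in a cleanup block; returns `None` otherwise.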
    fn get_valid_function_call(
        &self,
        bb: BasicBlock,
        bb_data: &BasicBlockData<'tcx>,
        caller_body: &Body<'tcx>,
        param_env: ParamEnv<'tcx>,
    ) -> Option<CallSite<'tcx>> {
        // Don't inline calls that are in cleanup blocks.
        if bb_data.is_cleanup {
            return None;
        }

        // Only consider direct calls to functions
        let terminator = bb_data.terminator();
        if let TerminatorKind::Call { func: ref op, .. } = terminator.kind {
            if let ty::FnDef(callee_def_id, substs) = *op.ty(caller_body, self.tcx).kind() {
                let instance =
                    Instance::resolve(self.tcx, param_env, callee_def_id, substs).ok().flatten()?;

                if let InstanceDef::Virtual(..) = instance.def {
                    return None;
                }

                return Some(CallSite {
                    callee: instance.def_id(),
                    substs: instance.substs,
                    bb,
                    location: terminator.source_info,
                });
            }
        }

        None
    }

    fn consider_optimizing(&self, callsite: CallSite<'tcx>, callee_body: &Body<'tcx>) -> bool {
        debug!("consider_optimizing({:?})", callsite);
        self.should_inline(callsite, callee_body)
            && self.tcx.consider_optimizing(|| {
                format!("Inline {:?} into {:?}", callee_body.span, callsite)
            })
    }

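    /// Applies the inlining heuristics: rejects callees that cannot or should not be inlined
    /// (untransformed generators, `#[track_caller]`, extra target features, mismatched
    /// `no_sanitize`, `#[inline(never)]`, local functions not eligible for cross-crate inlining),
    /// then estimates the cost of the callee body and compares it against a threshold derived
    /// from the inline hint and `#[cold]` attributes.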
    fn should_inline(&self, callsite: CallSite<'tcx>, callee_body: &Body<'tcx>) -> bool {
        debug!("should_inline({:?})", callsite);
        let tcx = self.tcx;

        // Cannot inline generators which haven't been transformed yet
        if callee_body.yield_ty.is_some() {
            debug!(" yield ty present - not inlining");
            return false;
        }

        let codegen_fn_attrs = tcx.codegen_fn_attrs(callsite.callee);

        if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::TRACK_CALLER) {
            debug!("`#[track_caller]` present - not inlining");
            return false;
        }

        let self_features = &self.codegen_fn_attrs.target_features;
        let callee_features = &codegen_fn_attrs.target_features;
        if callee_features.iter().any(|feature| !self_features.contains(feature)) {
            debug!("callee has extra target features - not inlining");
            return false;
        }

        let self_no_sanitize =
            self.codegen_fn_attrs.no_sanitize & self.tcx.sess.opts.debugging_opts.sanitizer;
        let callee_no_sanitize =
            codegen_fn_attrs.no_sanitize & self.tcx.sess.opts.debugging_opts.sanitizer;
        if self_no_sanitize != callee_no_sanitize {
            debug!("callee has incompatible no_sanitize attribute - not inlining");
            return false;
        }

        let hinted = match codegen_fn_attrs.inline {
            // Just treat inline(always) as a hint for now,
            // there are cases that prevent inlining that we
            // need to check for first.
            attr::InlineAttr::Always => true,
            attr::InlineAttr::Never => {
                debug!("`#[inline(never)]` present - not inlining");
                return false;
            }
            attr::InlineAttr::Hint => true,
            attr::InlineAttr::None => false,
        };

        // Only inline local functions if they would be eligible for cross-crate
        // inlining. This is to ensure that the final crate doesn't have MIR that
        // references unexported symbols.
        if callsite.callee.is_local() {
            if callsite.substs.non_erasable_generics().count() == 0 && !hinted {
                debug!(" callee is an exported function - not inlining");
                return false;
            }
        }

        let mut threshold = if hinted { HINT_THRESHOLD } else { DEFAULT_THRESHOLD };

        // Significantly lower the threshold for inlining cold functions
        if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
            threshold /= 5;
        }

        // Give a bonus to functions with a small number of blocks;
        // we normally have two or three blocks for even
        // very small functions.
        if callee_body.basic_blocks().len() <= 3 {
            threshold += threshold / 4;
        }
        debug!(" final inline threshold = {}", threshold);

        // FIXME: Give a bonus to functions with only a single caller

        let param_env = tcx.param_env(self.source.def_id());

        let mut first_block = true;
        let mut cost = 0;

        // Traverse the MIR manually so we can account for the effects of
        // inlining on the CFG.
        let mut work_list = vec![START_BLOCK];
        let mut visited = BitSet::new_empty(callee_body.basic_blocks().len());
        while let Some(bb) = work_list.pop() {
            if !visited.insert(bb.index()) {
                continue;
            }
            let blk = &callee_body.basic_blocks()[bb];

            for stmt in &blk.statements {
                // Don't count StorageLive/StorageDead in the inlining cost.
                match stmt.kind {
                    StatementKind::StorageLive(_)
                    | StatementKind::StorageDead(_)
                    | StatementKind::Nop => {}
                    _ => cost += INSTR_COST,
                }
            }
            let term = blk.terminator();
            let mut is_drop = false;
            match term.kind {
                TerminatorKind::Drop { ref place, target, unwind }
                | TerminatorKind::DropAndReplace { ref place, target, unwind, .. } => {
                    is_drop = true;
                    work_list.push(target);
                    // If the place doesn't actually need dropping, treat it like
                    // a regular goto.
                    let ty = place.ty(callee_body, tcx).subst(tcx, callsite.substs).ty;
                    if ty.needs_drop(tcx, param_env) {
                        cost += CALL_PENALTY;
                        if let Some(unwind) = unwind {
                            cost += LANDINGPAD_PENALTY;
                            work_list.push(unwind);
                        }
                    } else {
                        cost += INSTR_COST;
                    }
                }

                TerminatorKind::Unreachable | TerminatorKind::Call { destination: None, .. }
                    if first_block =>
                {
                    // If the function always diverges, don't inline
                    // unless the cost is zero
                    threshold = 0;
                }

                TerminatorKind::Call { func: Operand::Constant(ref f), cleanup, .. } => {
                    if let ty::FnDef(def_id, _) = *f.literal.ty.kind() {
                        // Don't give intrinsics the extra penalty for calls
                        let f = tcx.fn_sig(def_id);
                        if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
                            cost += INSTR_COST;
                        } else {
                            cost += CALL_PENALTY;
                        }
                    } else {
                        cost += CALL_PENALTY;
                    }
                    if cleanup.is_some() {
                        cost += LANDINGPAD_PENALTY;
                    }
                }
                TerminatorKind::Assert { cleanup, .. } => {
                    cost += CALL_PENALTY;

                    if cleanup.is_some() {
                        cost += LANDINGPAD_PENALTY;
                    }
                }
                TerminatorKind::Resume => cost += RESUME_PENALTY,
                _ => cost += INSTR_COST,
            }

            if !is_drop {
                for &succ in term.successors() {
                    work_list.push(succ);
                }
            }

            first_block = false;
        }

        // Count up the cost of local variables and temps; if we know the size,
        // use that, otherwise we use a moderately-large dummy cost.

        let ptr_size = tcx.data_layout.pointer_size.bytes();

        for v in callee_body.vars_and_temps_iter() {
            let v = &callee_body.local_decls[v];
            let ty = v.ty.subst(tcx, callsite.substs);
            // Cost of the var is the size in machine-words, if we know
            // it.
            if let Some(size) = type_size_of(tcx, param_env, ty) {
                cost += (size / ptr_size) as usize;
            } else {
                cost += UNKNOWN_SIZE_COST;
            }
        }

        if let attr::InlineAttr::Always = codegen_fn_attrs.inline {
            debug!("INLINING {:?} because inline(always) [cost={}]", callsite, cost);
            true
        } else {
            if cost <= threshold {
                debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold);
                true
            } else {
                debug!("NOT inlining {:?} [cost={} > threshold={}]", callsite, cost, threshold);
                false
            }
        }
    }

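    /// Splices `callee_body` into `caller_body` at `callsite`: remaps source scopes and locals,
    /// rewires the call's destination, arguments, and cleanup edges via `Integrator`, and replaces
    /// the call terminator with a `Goto` into the inlined blocks. Returns `true` on success and
    /// `false` if the terminator was not an inlinable `Call`.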
    fn inline_call(
        &self,
        callsite: CallSite<'tcx>,
        caller_body: &mut Body<'tcx>,
        mut callee_body: Body<'tcx>,
    ) -> bool {
        let terminator = caller_body[callsite.bb].terminator.take().unwrap();
        match terminator.kind {
            // FIXME: Handle inlining of diverging calls
            TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
                debug!("inlined {:?} into {:?}", callsite.callee, self.source);

                let mut local_map = IndexVec::with_capacity(callee_body.local_decls.len());
                let mut scope_map = IndexVec::with_capacity(callee_body.source_scopes.len());

                for mut scope in callee_body.source_scopes.iter().cloned() {
                    if scope.parent_scope.is_none() {
                        scope.parent_scope = Some(callsite.location.scope);
                        // FIXME(eddyb) is this really needed?
                        // (also note that it's always overwritten below)
                        scope.span = callee_body.span;
                    }

                    // FIXME(eddyb) this doesn't seem right at all.
                    // The inlined source scopes should probably be annotated as
                    // such, but also contain all of the original information.
                    scope.span = callsite.location.span;

                    let idx = caller_body.source_scopes.push(scope);
                    scope_map.push(idx);
                }

                for loc in callee_body.vars_and_temps_iter() {
                    let mut local = callee_body.local_decls[loc].clone();

                    local.source_info.scope = scope_map[local.source_info.scope];
                    local.source_info.span = callsite.location.span;

                    let idx = caller_body.local_decls.push(local);
                    local_map.push(idx);
                }

                // If the call is something like `a[*i] = f(i)`, where
                // `i : &mut usize`, then just duplicating the `a[*i]`
                // Place could result in two different locations if `f`
                // writes to `i`. To prevent this we need to create a temporary
                // borrow of the place and pass the destination as `*temp` instead.
                fn dest_needs_borrow(place: Place<'_>) -> bool {
                    for elem in place.projection.iter() {
                        match elem {
                            ProjectionElem::Deref | ProjectionElem::Index(_) => return true,
                            _ => {}
                        }
                    }

                    false
                }

                let dest = if dest_needs_borrow(destination.0) {
                    debug!("creating temp for return destination");
                    let dest = Rvalue::Ref(
                        self.tcx.lifetimes.re_erased,
                        BorrowKind::Mut { allow_two_phase_borrow: false },
                        destination.0,
                    );

                    let ty = dest.ty(caller_body, self.tcx);

                    let temp = LocalDecl::new(ty, callsite.location.span);

                    let tmp = caller_body.local_decls.push(temp);
                    let tmp = Place::from(tmp);

                    let stmt = Statement {
                        source_info: callsite.location,
                        kind: StatementKind::Assign(box (tmp, dest)),
                    };
                    caller_body[callsite.bb].statements.push(stmt);
                    self.tcx.mk_place_deref(tmp)
                } else {
                    destination.0
                };

                let return_block = destination.1;

                // Copy the arguments if needed.
                let args: Vec<_> = self.make_call_args(args, &callsite, caller_body, return_block);

                let bb_len = caller_body.basic_blocks().len();
                let mut integrator = Integrator {
                    block_idx: bb_len,
                    args: &args,
                    local_map,
                    scope_map,
                    destination: dest,
                    return_block,
                    cleanup_block: cleanup,
                    in_cleanup_block: false,
                    tcx: self.tcx,
                };

                for mut var_debug_info in callee_body.var_debug_info.drain(..) {
                    integrator.visit_var_debug_info(&mut var_debug_info);
                    caller_body.var_debug_info.push(var_debug_info);
                }

                for (bb, mut block) in callee_body.basic_blocks_mut().drain_enumerated(..) {
                    integrator.visit_basic_block_data(bb, &mut block);
                    caller_body.basic_blocks_mut().push(block);
                }

                let terminator = Terminator {
                    source_info: callsite.location,
                    kind: TerminatorKind::Goto { target: BasicBlock::new(bb_len) },
                };

                caller_body[callsite.bb].terminator = Some(terminator);

                true
            }
            kind => {
                caller_body[callsite.bb].terminator =
                    Some(Terminator { source_info: terminator.source_info, kind });
                false
            }
        }
    }

    fn make_call_args(
        &self,
        args: Vec<Operand<'tcx>>,
        callsite: &CallSite<'tcx>,
        caller_body: &mut Body<'tcx>,
        return_block: BasicBlock,
    ) -> Vec<Local> {
        let tcx = self.tcx;

        // There is a bit of a mismatch between the *caller* of a closure and the *callee*.
        // The caller provides the arguments wrapped up in a tuple:
        //
        //     tuple_tmp = (a, b, c)
        //     Fn::call(closure_ref, tuple_tmp)
        //
        // meanwhile the closure body expects the arguments (here, `a`, `b`, and `c`)
        // as distinct arguments. (This is the "rust-call" ABI hack.) Normally, codegen has
        // the job of unpacking this tuple. But here, we are codegen. =) So we want to create
        // a vector like
        //
        //     [closure_ref, tuple_tmp.0, tuple_tmp.1, tuple_tmp.2]
        //
        // Except for one tiny wrinkle: we don't actually want `tuple_tmp.0`. It's more convenient
        // if we "spill" that into *another* temporary, so that we can map the argument
        // variable in the callee MIR directly to an argument variable on our side.
        // So we introduce temporaries like:
        //
        //     tmp0 = tuple_tmp.0
        //     tmp1 = tuple_tmp.1
        //     tmp2 = tuple_tmp.2
        //
        // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
        if tcx.is_closure(callsite.callee) {
            let mut args = args.into_iter();
            let self_ = self.create_temp_if_necessary(
                args.next().unwrap(),
                callsite,
                caller_body,
                return_block,
            );
            let tuple = self.create_temp_if_necessary(
                args.next().unwrap(),
                callsite,
                caller_body,
                return_block,
            );
            assert!(args.next().is_none());

            let tuple = Place::from(tuple);
            let tuple_tys = if let ty::Tuple(s) = tuple.ty(caller_body, tcx).ty.kind() {
                s
            } else {
                bug!("Closure arguments are not passed as a tuple");
            };

            // The `closure_ref` in our example above.
            let closure_ref_arg = iter::once(self_);

            // The `tmp0`, `tmp1`, and `tmp2` in our example above.
            let tuple_tmp_args = tuple_tys.iter().enumerate().map(|(i, ty)| {
                // This is e.g., `tuple_tmp.0` in our example above.
                let tuple_field =
                    Operand::Move(tcx.mk_place_field(tuple, Field::new(i), ty.expect_ty()));

                // Spill to a local to make e.g., `tmp0`.
                self.create_temp_if_necessary(tuple_field, callsite, caller_body, return_block)
            });

            closure_ref_arg.chain(tuple_tmp_args).collect()
        } else {
            args.into_iter()
                .map(|a| self.create_temp_if_necessary(a, callsite, caller_body, return_block))
                .collect()
        }
    }

    /// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh
    /// temporary `T` and an instruction `T = arg`, and returns `T`.
    fn create_temp_if_necessary(
        &self,
        arg: Operand<'tcx>,
        callsite: &CallSite<'tcx>,
        caller_body: &mut Body<'tcx>,
        return_block: BasicBlock,
    ) -> Local {
        // FIXME: Analysis of the usage of the arguments to avoid
        // unnecessary temporaries.

        if let Operand::Move(place) = &arg {
            if let Some(local) = place.as_local() {
                if caller_body.local_kind(local) == LocalKind::Temp {
                    // Reuse the operand if it's a temporary already
                    return local;
                }
            }
        }

        debug!("creating temp for argument {:?}", arg);
        // Otherwise, create a temporary for the arg
        let arg = Rvalue::Use(arg);

        let ty = arg.ty(caller_body, self.tcx);

        let arg_tmp = LocalDecl::new(ty, callsite.location.span);
        let arg_tmp = caller_body.local_decls.push(arg_tmp);

        caller_body[callsite.bb].statements.push(Statement {
            source_info: callsite.location,
            kind: StatementKind::StorageLive(arg_tmp),
        });
        caller_body[callsite.bb].statements.push(Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(box (Place::from(arg_tmp), arg)),
        });
        caller_body[return_block].statements.insert(
            0,
            Statement { source_info: callsite.location, kind: StatementKind::StorageDead(arg_tmp) },
        );

        arg_tmp
    }
}

fn type_size_of<'tcx>(
    tcx: TyCtxt<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
    ty: Ty<'tcx>,
) -> Option<u64> {
    tcx.layout_of(param_env.and(ty)).ok().map(|layout| layout.size.bytes())
}

/// Integrator.
///
/// Integrates blocks from the callee function into the calling function.
/// Updates block indices, references to locals and other control flow
/// stuff.
struct Integrator<'a, 'tcx> {
    block_idx: usize,
    args: &'a [Local],
    local_map: IndexVec<Local, Local>,
    scope_map: IndexVec<SourceScope, SourceScope>,
    destination: Place<'tcx>,
    return_block: BasicBlock,
    cleanup_block: Option<BasicBlock>,
    in_cleanup_block: bool,
    tcx: TyCtxt<'tcx>,
}

impl<'a, 'tcx> Integrator<'a, 'tcx> {
    fn update_target(&self, tgt: BasicBlock) -> BasicBlock {
        let new = BasicBlock::new(tgt.index() + self.block_idx);
        debug!("updating target `{:?}`, new: `{:?}`", tgt, new);
        new
    }

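    /// Remaps a callee local into the caller body: `_0` becomes the call's destination local,
    /// the next `args.len()` locals become the prepared argument locals, and the remaining
    /// vars and temps are looked up in `local_map`.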
    fn make_integrate_local(&self, local: Local) -> Local {
        if local == RETURN_PLACE {
            return self.destination.local;
        }

        let idx = local.index() - 1;
        if idx < self.args.len() {
            return self.args[idx];
        }

        self.local_map[Local::new(idx - self.args.len())]
    }
}

impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx
    }

    fn visit_local(&mut self, local: &mut Local, _ctxt: PlaceContext, _location: Location) {
        *local = self.make_integrate_local(*local);
    }

    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
        // If this is the `RETURN_PLACE`, we need to rebase any projections onto it.
        let dest_proj_len = self.destination.projection.len();
        if place.local == RETURN_PLACE && dest_proj_len > 0 {
            let mut projs = Vec::with_capacity(dest_proj_len + place.projection.len());
            projs.extend(self.destination.projection);
            projs.extend(place.projection);

            place.projection = self.tcx.intern_place_elems(&*projs);
        }
        // Handles integrating any locals that occur in the base
        // or projections
        self.super_place(place, context, location)
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        self.in_cleanup_block = data.is_cleanup;
        self.super_basic_block_data(block, data);
        self.in_cleanup_block = false;
    }

    fn visit_retag(&mut self, kind: &mut RetagKind, place: &mut Place<'tcx>, loc: Location) {
        self.super_retag(kind, place, loc);

        // We have to patch all inlined retags to be aware that they are no longer
        // happening on function entry.
        if *kind == RetagKind::FnEntry {
            *kind = RetagKind::Default;
        }
    }

    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, loc: Location) {
        // Don't try to modify the implicit `_0` access on return (`return` terminators are
        // replaced down below anyways).
        if !matches!(terminator.kind, TerminatorKind::Return) {
            self.super_terminator(terminator, loc);
        }

        match terminator.kind {
            TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => bug!(),
            TerminatorKind::Goto { ref mut target } => {
                *target = self.update_target(*target);
            }
            TerminatorKind::SwitchInt { ref mut targets, .. } => {
                for tgt in targets {
                    *tgt = self.update_target(*tgt);
                }
            }
            TerminatorKind::Drop { ref mut target, ref mut unwind, .. }
            | TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *unwind {
                    *unwind = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this drop is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *unwind = self.cleanup_block;
                }
            }
            TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
                if let Some((_, ref mut tgt)) = *destination {
                    *tgt = self.update_target(*tgt);
                }
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this call is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this assert is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Return => {
                terminator.kind = TerminatorKind::Goto { target: self.return_block };
            }
            TerminatorKind::Resume => {
                if let Some(tgt) = self.cleanup_block {
                    terminator.kind = TerminatorKind::Goto { target: tgt }
                }
            }
            TerminatorKind::Abort => {}
            TerminatorKind::Unreachable => {}
            TerminatorKind::FalseEdge { ref mut real_target, ref mut imaginary_target } => {
                *real_target = self.update_target(*real_target);
                *imaginary_target = self.update_target(*imaginary_target);
            }
            TerminatorKind::FalseUnwind { real_target: _, unwind: _ } =>
            // see the ordering of passes in the optimized_mir query.
            {
                bug!("False unwinds should have been removed before inlining")
            }
            TerminatorKind::InlineAsm { ref mut destination, .. } => {
                if let Some(ref mut tgt) = *destination {
                    *tgt = self.update_target(*tgt);
                }
            }
        }
    }

    fn visit_source_scope(&mut self, scope: &mut SourceScope) {
        *scope = self.scope_map[*scope];
    }
}