// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Inlining pass for MIR functions

use rustc::hir;
use rustc::hir::def_id::DefId;

use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};

use rustc::mir::*;
use rustc::mir::visit::*;
use rustc::ty::{self, Instance, Ty, TyCtxt, TypeFoldable};
use rustc::ty::subst::{Subst, Substs};

use std::collections::VecDeque;
use transform::{MirPass, MirSource};
use super::simplify::{remove_dead_blocks, CfgSimplifier};

use syntax::attr;
use syntax::abi::Abi;

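// Rough cost-model knobs for `should_inline` below: each counted statement
// costs INSTR_COST; calls, asserts, and drops of types that need dropping
// cost CALL_PENALTY; locals whose size is unknown are charged
// UNKNOWN_SIZE_COST. The total is compared against DEFAULT_THRESHOLD, or
// HINT_THRESHOLD if the callee carries an #[inline] hint.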
const DEFAULT_THRESHOLD: usize = 50;
const HINT_THRESHOLD: usize = 100;

const INSTR_COST: usize = 5;
const CALL_PENALTY: usize = 25;

const UNKNOWN_SIZE_COST: usize = 10;

pub struct Inline;

#[derive(Copy, Clone, Debug)]
struct CallSite<'tcx> {
    callee: DefId,
    substs: &'tcx Substs<'tcx>,
    bb: BasicBlock,
    location: SourceInfo,
}

impl MirPass for Inline {
    fn run_pass<'a, 'tcx>(&self,
                          tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          source: MirSource,
                          mir: &mut Mir<'tcx>) {
        if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 {
            Inliner { tcx, source }.run_pass(mir);
        }
    }
}

struct Inliner<'a, 'tcx: 'a> {
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    source: MirSource,
}

impl<'a, 'tcx> Inliner<'a, 'tcx> {
    fn run_pass(&self, caller_mir: &mut Mir<'tcx>) {
        // Keep a queue of callsites to try inlining on. We take
        // advantage of the fact that queries detect cycles here to
        // allow us to try to fetch the fully optimized MIR of a
        // callee; if the query succeeds, we can inline it and we know
        // that it does not call us. Otherwise, we just don't try to
        // inline.
        //
        // We use a queue so that we inline "broadly" before we inline
        // in depth. It is unclear if this is the best heuristic,
        // really, but that's true of all the heuristics in this
        // file. =)

        let mut callsites = VecDeque::new();

        let param_env = self.tcx.param_env(self.source.def_id);

        // Only do inlining into fn bodies.
        let id = self.tcx.hir.as_local_node_id(self.source.def_id).unwrap();
        let body_owner_kind = self.tcx.hir.body_owner_kind(id);
        if let (hir::BodyOwnerKind::Fn, None) = (body_owner_kind, self.source.promoted) {

            for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated() {
                // Don't inline calls that are in cleanup blocks.
                if bb_data.is_cleanup { continue; }

                // Only consider direct calls to functions
                let terminator = bb_data.terminator();
                if let TerminatorKind::Call {
                        func: Operand::Constant(ref f), .. } = terminator.kind {
                    if let ty::TyFnDef(callee_def_id, substs) = f.ty.sty {
                        if let Some(instance) = Instance::resolve(self.tcx,
                                                                  param_env,
                                                                  callee_def_id,
                                                                  substs) {
                            callsites.push_back(CallSite {
                                callee: instance.def_id(),
                                substs: instance.substs,
                                bb,
                                location: terminator.source_info
                            });
                        }
                    }
                }
            }
        } else {
            return;
        }

        let mut local_change;
        let mut changed = false;

        loop {
            local_change = false;
            while let Some(callsite) = callsites.pop_front() {
                debug!("checking whether to inline callsite {:?}", callsite);
                if !self.tcx.is_mir_available(callsite.callee) {
                    debug!("checking whether to inline callsite {:?} - MIR unavailable", callsite);
                    continue;
                }

                let callee_mir = match ty::queries::optimized_mir::try_get(self.tcx,
                                                                           callsite.location.span,
                                                                           callsite.callee) {
                    Ok(ref callee_mir) if self.should_inline(callsite, callee_mir) => {
                        subst_and_normalize(callee_mir, self.tcx, &callsite.substs, param_env)
                    }
                    Ok(_) => continue,

                    Err(mut bug) => {
                        // FIXME(#43542) shouldn't have to cancel an error
                        bug.cancel();
                        continue
                    }
                };

                let start = caller_mir.basic_blocks().len();
                debug!("attempting to inline callsite {:?} - mir={:?}", callsite, callee_mir);
                if !self.inline_call(callsite, caller_mir, callee_mir) {
                    debug!("attempting to inline callsite {:?} - failure", callsite);
                    continue;
                }
                debug!("attempting to inline callsite {:?} - success", callsite);

                // Add callsites from inlined function
                for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated().skip(start) {
                    // Only consider direct calls to functions
                    let terminator = bb_data.terminator();
                    if let TerminatorKind::Call {
                            func: Operand::Constant(ref f), .. } = terminator.kind {
                        if let ty::TyFnDef(callee_def_id, substs) = f.ty.sty {
                            // Don't inline the same function multiple times.
                            if callsite.callee != callee_def_id {
                                callsites.push_back(CallSite {
                                    callee: callee_def_id,
                                    substs,
                                    bb,
                                    location: terminator.source_info
                                });
                            }
                        }
                    }
                }

                local_change = true;
                changed = true;
            }

            if !local_change {
                break;
            }
        }

        // Simplify if we inlined anything.
        if changed {
            debug!("Running simplify cfg on {:?}", self.source);
            CfgSimplifier::new(caller_mir).simplify();
            remove_dead_blocks(caller_mir);
        }
    }

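    /// Decides whether inlining `callsite` is both possible and likely
    /// profitable. Hard constraints (closures with captures, untransformed
    /// generators, #[inline(never)], exported local functions) are checked
    /// first; the callee MIR is then walked to estimate a cost, which is
    /// compared against a threshold derived from the callee's inline hints.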
    fn should_inline(&self,
                     callsite: CallSite<'tcx>,
                     callee_mir: &Mir<'tcx>)
                     -> bool
    {
        debug!("should_inline({:?})", callsite);
        let tcx = self.tcx;

        // Don't inline closures that have captures
        // FIXME: Handle closures better
        if callee_mir.upvar_decls.len() > 0 {
            debug!(" upvar decls present - not inlining");
            return false;
        }

        // Cannot inline generators which haven't been transformed yet
        if callee_mir.yield_ty.is_some() {
            debug!(" yield ty present - not inlining");
            return false;
        }

        let attrs = tcx.get_attrs(callsite.callee);
        let hint = attr::find_inline_attr(None, &attrs[..]);

        let hinted = match hint {
            // Just treat inline(always) as a hint for now; there are
            // cases that prevent inlining that we need to check for
            // first.
            attr::InlineAttr::Always => true,
            attr::InlineAttr::Never => {
                debug!("#[inline(never)] present - not inlining");
                return false
            }
            attr::InlineAttr::Hint => true,
            attr::InlineAttr::None => false,
        };

        // Only inline local functions if they would be eligible for cross-crate
        // inlining. This is to ensure that the final crate doesn't have MIR that
        // references unexported symbols.
        if callsite.callee.is_local() {
            if callsite.substs.types().count() == 0 && !hinted {
                debug!(" callee is an exported function - not inlining");
                return false;
            }
        }

        let mut threshold = if hinted {
            HINT_THRESHOLD
        } else {
            DEFAULT_THRESHOLD
        };

        // Significantly lower the threshold for inlining cold functions
        if attr::contains_name(&attrs[..], "cold") {
            threshold /= 5;
        }

        // Give a bonus to functions with a small number of blocks;
        // we normally have two or three blocks for even very small
        // functions.
        if callee_mir.basic_blocks().len() <= 3 {
            threshold += threshold / 4;
        }
        debug!(" final inline threshold = {}", threshold);

        // FIXME: Give a bonus to functions with only a single caller

        let param_env = tcx.param_env(self.source.def_id);

        let mut first_block = true;
        let mut cost = 0;

        // Traverse the MIR manually so we can account for the effects of
        // inlining on the CFG.
        let mut work_list = vec![START_BLOCK];
        let mut visited = BitVector::new(callee_mir.basic_blocks().len());
        while let Some(bb) = work_list.pop() {
            if !visited.insert(bb.index()) { continue; }
            let blk = &callee_mir.basic_blocks()[bb];

            for stmt in &blk.statements {
                // Don't count StorageLive/StorageDead in the inlining cost.
                match stmt.kind {
                    StatementKind::StorageLive(_) |
                    StatementKind::StorageDead(_) |
                    StatementKind::Nop => {}
                    _ => cost += INSTR_COST
                }
            }
            let term = blk.terminator();
            let mut is_drop = false;
            match term.kind {
                TerminatorKind::Drop { ref location, target, unwind } |
                TerminatorKind::DropAndReplace { ref location, target, unwind, .. } => {
                    is_drop = true;
                    work_list.push(target);
                    // If the location doesn't actually need dropping, treat it like
                    // a regular goto.
                    let ty = location.ty(callee_mir, tcx).subst(tcx, callsite.substs);
                    let ty = ty.to_ty(tcx);
                    if ty.needs_drop(tcx, param_env) {
                        cost += CALL_PENALTY;
                        if let Some(unwind) = unwind {
                            work_list.push(unwind);
                        }
                    } else {
                        cost += INSTR_COST;
                    }
                }

                TerminatorKind::Unreachable |
                TerminatorKind::Call { destination: None, .. } if first_block => {
                    // If the function always diverges, don't inline
                    // unless the cost is zero
                    threshold = 0;
                }

                TerminatorKind::Call { func: Operand::Constant(ref f), .. } => {
                    if let ty::TyFnDef(def_id, _) = f.ty.sty {
                        // Don't give intrinsics the extra penalty for calls
                        let f = tcx.fn_sig(def_id);
                        if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
                            cost += INSTR_COST;
                        } else {
                            cost += CALL_PENALTY;
                        }
                    }
                }
                TerminatorKind::Assert { .. } => cost += CALL_PENALTY,
                _ => cost += INSTR_COST
            }

            if !is_drop {
                for &succ in &term.successors()[..] {
                    work_list.push(succ);
                }
            }

            first_block = false;
        }

        // Count up the cost of local variables and temps; if we know the
        // size, use that, otherwise use a moderately-large dummy cost.

        let ptr_size = tcx.data_layout.pointer_size.bytes();

        for v in callee_mir.vars_and_temps_iter() {
            let v = &callee_mir.local_decls[v];
            let ty = v.ty.subst(tcx, callsite.substs);
            // Cost of the var is the size in machine-words, if we know
            // it.
            if let Some(size) = type_size_of(tcx, param_env.clone(), ty) {
                cost += (size / ptr_size) as usize;
            } else {
                cost += UNKNOWN_SIZE_COST;
            }
        }

        if let attr::InlineAttr::Always = hint {
            debug!("INLINING {:?} because inline(always) [cost={}]", callsite, cost);
            true
        } else {
            if cost <= threshold {
                debug!("INLINING {:?} [cost={} <= threshold={}]", callsite, cost, threshold);
                true
            } else {
                debug!("NOT inlining {:?} [cost={} > threshold={}]", callsite, cost, threshold);
                false
            }
        }
    }

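    /// Splices `callee_mir` into `caller_mir` at `callsite`: the callee's
    /// scopes, locals, and promoted constants are appended to the caller's
    /// tables, its basic blocks are appended and rewired by `Integrator`,
    /// and the original `Call` terminator is replaced with a `Goto` into
    /// the first inlined block. Returns false (restoring the terminator)
    /// for call shapes that aren't handled, e.g. diverging calls.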
    fn inline_call(&self,
                   callsite: CallSite<'tcx>,
                   caller_mir: &mut Mir<'tcx>,
                   mut callee_mir: Mir<'tcx>) -> bool {
        let terminator = caller_mir[callsite.bb].terminator.take().unwrap();
        match terminator.kind {
            // FIXME: Handle inlining of diverging calls
            TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
                debug!("Inlined {:?} into {:?}", callsite.callee, self.source);

                let is_box_free = Some(callsite.callee) == self.tcx.lang_items().box_free_fn();

                let mut local_map = IndexVec::with_capacity(callee_mir.local_decls.len());
                let mut scope_map = IndexVec::with_capacity(callee_mir.visibility_scopes.len());
                let mut promoted_map = IndexVec::with_capacity(callee_mir.promoted.len());

                for mut scope in callee_mir.visibility_scopes.iter().cloned() {
                    if scope.parent_scope.is_none() {
                        scope.parent_scope = Some(callsite.location.scope);
                        scope.span = callee_mir.span;
                    }

                    scope.span = callsite.location.span;

                    let idx = caller_mir.visibility_scopes.push(scope);
                    scope_map.push(idx);
                }

                for loc in callee_mir.vars_and_temps_iter() {
                    let mut local = callee_mir.local_decls[loc].clone();

                    local.source_info.scope = scope_map[local.source_info.scope];
                    local.source_info.span = callsite.location.span;

                    let idx = caller_mir.local_decls.push(local);
                    local_map.push(idx);
                }

                for p in callee_mir.promoted.iter().cloned() {
                    let idx = caller_mir.promoted.push(p);
                    promoted_map.push(idx);
                }

                // If the call is something like `a[*i] = f(i)`, where
                // `i : &mut usize`, then just duplicating the `a[*i]`
                // Lvalue could result in two different locations if `f`
                // writes to `i`. To prevent this we need to create a temporary
                // borrow of the lvalue and pass the destination as `*temp` instead.
                fn dest_needs_borrow(lval: &Lvalue) -> bool {
                    match *lval {
                        Lvalue::Projection(ref p) => {
                            match p.elem {
                                ProjectionElem::Deref |
                                ProjectionElem::Index(_) => true,
                                _ => dest_needs_borrow(&p.base)
                            }
                        }
                        // Static variables need a borrow because the callee
                        // might modify the same static.
                        Lvalue::Static(_) => true,
                        _ => false
                    }
                }

                let dest = if dest_needs_borrow(&destination.0) {
                    debug!("Creating temp for return destination");
                    let dest = Rvalue::Ref(
                        self.tcx.types.re_erased,
                        BorrowKind::Mut,
                        destination.0);

                    let ty = dest.ty(caller_mir, self.tcx);

                    let temp = LocalDecl::new_temp(ty, callsite.location.span);

                    let tmp = caller_mir.local_decls.push(temp);
                    let tmp = Lvalue::Local(tmp);

                    let stmt = Statement {
                        source_info: callsite.location,
                        kind: StatementKind::Assign(tmp.clone(), dest)
                    };
                    caller_mir[callsite.bb]
                        .statements.push(stmt);
                    tmp.deref()
                } else {
                    destination.0
                };

                let return_block = destination.1;

                let args: Vec<_> = if is_box_free {
                    assert!(args.len() == 1);
                    // box_free takes a Box, but is defined with a *mut T;
                    // inlining needs to generate the cast.
                    // FIXME: we should probably just generate correct MIR in the first place...

                    let arg = if let Operand::Consume(ref lval) = args[0] {
                        lval.clone()
                    } else {
                        bug!("Constant arg to \"box_free\"");
                    };

                    let ptr_ty = args[0].ty(caller_mir, self.tcx);
                    vec![self.cast_box_free_arg(arg, ptr_ty, &callsite, caller_mir)]
                } else {
                    // Copy the arguments if needed.
                    self.make_call_args(args, &callsite, caller_mir)
                };

                let bb_len = caller_mir.basic_blocks().len();
                let mut integrator = Integrator {
                    block_idx: bb_len,
                    args: &args,
                    local_map,
                    scope_map,
                    promoted_map,
                    _callsite: callsite,
                    destination: dest,
                    return_block,
                    cleanup_block: cleanup,
                    in_cleanup_block: false
                };

                for (bb, mut block) in callee_mir.basic_blocks_mut().drain_enumerated(..) {
                    integrator.visit_basic_block_data(bb, &mut block);
                    caller_mir.basic_blocks_mut().push(block);
                }

                let terminator = Terminator {
                    source_info: callsite.location,
                    kind: TerminatorKind::Goto { target: BasicBlock::new(bb_len) }
                };

                caller_mir[callsite.bb].terminator = Some(terminator);

                true
            }
            kind => {
                caller_mir[callsite.bb].terminator = Some(Terminator {
                    source_info: terminator.source_info,
                    kind,
                });
                false
            }
        }
    }

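    /// `box_free` is declared as taking a `*mut T`, while the argument at
    /// the callsite is a `Box`-like pointer. Emit `ref_tmp = &mut *arg`
    /// followed by `cast_tmp = ref_tmp as *mut T` in the caller, and hand
    /// `cast_tmp` to the inlined body instead.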
    fn cast_box_free_arg(&self, arg: Lvalue<'tcx>, ptr_ty: Ty<'tcx>,
                         callsite: &CallSite<'tcx>, caller_mir: &mut Mir<'tcx>) -> Operand<'tcx> {
        let arg = Rvalue::Ref(
            self.tcx.types.re_erased,
            BorrowKind::Mut,
            arg.deref());

        let ty = arg.ty(caller_mir, self.tcx);
        let ref_tmp = LocalDecl::new_temp(ty, callsite.location.span);
        let ref_tmp = caller_mir.local_decls.push(ref_tmp);
        let ref_tmp = Lvalue::Local(ref_tmp);

        let ref_stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(ref_tmp.clone(), arg)
        };

        caller_mir[callsite.bb]
            .statements.push(ref_stmt);

        let pointee_ty = match ptr_ty.sty {
            ty::TyRawPtr(tm) | ty::TyRef(_, tm) => tm.ty,
            _ if ptr_ty.is_box() => ptr_ty.boxed_ty(),
            _ => bug!("Invalid type `{:?}` for call to box_free", ptr_ty)
        };
        let ptr_ty = self.tcx.mk_mut_ptr(pointee_ty);

        let raw_ptr = Rvalue::Cast(CastKind::Misc, Operand::Consume(ref_tmp), ptr_ty);

        let cast_tmp = LocalDecl::new_temp(ptr_ty, callsite.location.span);
        let cast_tmp = caller_mir.local_decls.push(cast_tmp);
        let cast_tmp = Lvalue::Local(cast_tmp);

        let cast_stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(cast_tmp.clone(), raw_ptr)
        };

        caller_mir[callsite.bb]
            .statements.push(cast_stmt);

        Operand::Consume(cast_tmp)
    }

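    /// Normalizes the call's arguments into the operand list the inlined
    /// body expects: for closure calls, the argument tuple is exploded into
    /// one operand per field; in every case, operands that aren't already
    /// temporaries are spilled into fresh ones (see
    /// `create_temp_if_necessary`).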
    fn make_call_args(
        &self,
        args: Vec<Operand<'tcx>>,
        callsite: &CallSite<'tcx>,
        caller_mir: &mut Mir<'tcx>,
    ) -> Vec<Operand<'tcx>> {
        let tcx = self.tcx;

        // A closure call is passed the closure itself (`self`) and its
        // arguments packed into a tuple, hence mappings to tuple fields
        // are needed.
        if tcx.is_closure(callsite.callee) {
            let mut args = args.into_iter();
            let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_mir);
            let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_mir);
            assert!(args.next().is_none());

            let tuple_tys = if let ty::TyTuple(s, _) = tuple.ty(caller_mir, tcx).to_ty(tcx).sty {
                s
            } else {
                bug!("Closure arguments are not passed as a tuple");
            };

            let mut res = Vec::with_capacity(1 + tuple_tys.len());
            res.push(Operand::Consume(self_));
            res.extend(tuple_tys.iter().enumerate().map(|(i, ty)| {
                Operand::Consume(tuple.clone().field(Field::new(i), ty))
            }));
            res
        } else {
            args.into_iter()
                .map(|a| Operand::Consume(self.create_temp_if_necessary(a, callsite, caller_mir)))
                .collect()
        }
    }

    /// If `arg` is already a temporary, returns it. Otherwise, introduces a fresh
    /// temporary `T` and an instruction `T = arg`, and returns `T`.
    fn create_temp_if_necessary(
        &self,
        arg: Operand<'tcx>,
        callsite: &CallSite<'tcx>,
        caller_mir: &mut Mir<'tcx>,
    ) -> Lvalue<'tcx> {
        // FIXME: Analyze the usage of the arguments so we can avoid
        // creating unnecessary temporaries.

        if let Operand::Consume(Lvalue::Local(local)) = arg {
            if caller_mir.local_kind(local) == LocalKind::Temp {
                // Reuse the operand if it's a temporary already
                return Lvalue::Local(local);
            }
        }

        debug!("Creating temp for argument {:?}", arg);
        // Otherwise, create a temporary for the arg
        let arg = Rvalue::Use(arg);

        let ty = arg.ty(caller_mir, self.tcx);

        let arg_tmp = LocalDecl::new_temp(ty, callsite.location.span);
        let arg_tmp = caller_mir.local_decls.push(arg_tmp);
        let arg_tmp = Lvalue::Local(arg_tmp);

        let stmt = Statement {
            source_info: callsite.location,
            kind: StatementKind::Assign(arg_tmp.clone(), arg),
        };
        caller_mir[callsite.bb].statements.push(stmt);
        arg_tmp
    }
}

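/// Returns the size of `ty` in bytes, or `None` if its layout cannot be
/// computed (e.g. when the type is insufficiently monomorphic).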
fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          param_env: ty::ParamEnv<'tcx>,
                          ty: Ty<'tcx>) -> Option<u64> {
    ty.layout(tcx, param_env).ok().map(|layout| {
        layout.size(&tcx.data_layout).bytes()
    })
}

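/// Rewrites every type in `mir`, substituting the callsite's `substs` and
/// normalizing under `param_env`, so the inlined body is typed in terms of
/// the caller's generics rather than the callee's.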
fn subst_and_normalize<'a, 'tcx: 'a>(
    mir: &Mir<'tcx>,
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    substs: &'tcx ty::subst::Substs<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
) -> Mir<'tcx> {
    struct Folder<'a, 'tcx: 'a> {
        tcx: TyCtxt<'a, 'tcx, 'tcx>,
        param_env: ty::ParamEnv<'tcx>,
        substs: &'tcx ty::subst::Substs<'tcx>,
    }
    impl<'a, 'tcx: 'a> ty::fold::TypeFolder<'tcx, 'tcx> for Folder<'a, 'tcx> {
        fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
            self.tcx
        }

        fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
            self.tcx.trans_apply_param_substs_env(&self.substs, self.param_env, &t)
        }
    }
    let mut f = Folder { tcx, param_env, substs };
    mir.fold_with(&mut f)
}

/**
 * Integrator.
 *
 * Integrates blocks from the callee function into the calling function.
 * Updates block indices, references to locals and other control flow
 * stuff.
 */
struct Integrator<'a, 'tcx: 'a> {
    block_idx: usize,
    args: &'a [Operand<'tcx>],
    local_map: IndexVec<Local, Local>,
    scope_map: IndexVec<VisibilityScope, VisibilityScope>,
    promoted_map: IndexVec<Promoted, Promoted>,
    _callsite: CallSite<'tcx>,
    destination: Lvalue<'tcx>,
    return_block: BasicBlock,
    cleanup_block: Option<BasicBlock>,
    in_cleanup_block: bool,
}

impl<'a, 'tcx> Integrator<'a, 'tcx> {
    fn update_target(&self, tgt: BasicBlock) -> BasicBlock {
        let new = BasicBlock::new(tgt.index() + self.block_idx);
        debug!("Updating target `{:?}`, new: `{:?}`", tgt, new);
        new
    }

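    // Callee locals are numbered with the return pointer as local 0, the
    // arguments as locals 1..=arg_count, and vars/temps after that, so
    // `arg_index` recovers an argument's position in `self.args` from its
    // local index. `visit_local` below relies on the same numbering.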
    fn arg_index(&self, arg: Local) -> Option<usize> {
        let idx = arg.index();
        if idx > 0 && idx <= self.args.len() {
            Some(idx - 1)
        } else {
            None
        }
    }
}

impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
    fn visit_local(&mut self,
                   local: &mut Local,
                   _ctxt: LvalueContext<'tcx>,
                   _location: Location) {
        if *local == RETURN_POINTER {
            match self.destination {
                Lvalue::Local(l) => {
                    *local = l;
                    return;
                },
                ref lval => bug!("Return lvalue is {:?}, not local", lval)
            }
        }
        let idx = local.index() - 1;
        if idx < self.args.len() {
            match self.args[idx] {
                Operand::Consume(Lvalue::Local(l)) => {
                    *local = l;
                    return;
                },
                ref op => bug!("Arg operand `{:?}` is {:?}, not local", idx, op)
            }
        }
        *local = self.local_map[Local::new(idx - self.args.len())];
    }

    fn visit_lvalue(&mut self,
                    lvalue: &mut Lvalue<'tcx>,
                    _ctxt: LvalueContext<'tcx>,
                    _location: Location) {
        if let Lvalue::Local(RETURN_POINTER) = *lvalue {
            // Return pointer; update the lvalue itself
            *lvalue = self.destination.clone();
        } else {
            self.super_lvalue(lvalue, _ctxt, _location);
        }
    }

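    // An argument operand may not be a plain local, e.g. a field projection
    // into a closure's argument tuple, which `visit_local` cannot express;
    // in that case substitute the entire operand here instead.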
    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        if let Operand::Consume(Lvalue::Local(arg)) = *operand {
            if let Some(idx) = self.arg_index(arg) {
                let new_arg = self.args[idx].clone();
                *operand = new_arg;
                return;
            }
        }
        self.super_operand(operand, location);
    }

    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
        self.in_cleanup_block = data.is_cleanup;
        self.super_basic_block_data(block, data);
        self.in_cleanup_block = false;
    }

    fn visit_terminator_kind(&mut self, block: BasicBlock,
                             kind: &mut TerminatorKind<'tcx>, loc: Location) {
        self.super_terminator_kind(block, kind, loc);

        match *kind {
            TerminatorKind::GeneratorDrop |
            TerminatorKind::Yield { .. } => bug!(),
            TerminatorKind::Goto { ref mut target } => {
                *target = self.update_target(*target);
            }
            TerminatorKind::SwitchInt { ref mut targets, .. } => {
                for tgt in targets {
                    *tgt = self.update_target(*tgt);
                }
            }
            TerminatorKind::Drop { ref mut target, ref mut unwind, .. } |
            TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *unwind {
                    *unwind = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this drop is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *unwind = self.cleanup_block;
                }
            }
            TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
                if let Some((_, ref mut tgt)) = *destination {
                    *tgt = self.update_target(*tgt);
                }
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this call is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
                *target = self.update_target(*target);
                if let Some(tgt) = *cleanup {
                    *cleanup = Some(self.update_target(tgt));
                } else if !self.in_cleanup_block {
                    // Unless this assert is in a cleanup block, add an unwind edge to
                    // the original call's cleanup block
                    *cleanup = self.cleanup_block;
                }
            }
            TerminatorKind::Return => {
                *kind = TerminatorKind::Goto { target: self.return_block };
            }
            TerminatorKind::Resume => {
                if let Some(tgt) = self.cleanup_block {
                    *kind = TerminatorKind::Goto { target: tgt }
                }
            }
            TerminatorKind::Unreachable => { }
            TerminatorKind::FalseEdges { ref mut real_target, ref mut imaginary_targets } => {
                *real_target = self.update_target(*real_target);
                for target in imaginary_targets {
                    *target = self.update_target(*target);
                }
            }
        }
    }

    fn visit_visibility_scope(&mut self, scope: &mut VisibilityScope) {
        *scope = self.scope_map[*scope];
    }

    fn visit_literal(&mut self, literal: &mut Literal<'tcx>, loc: Location) {
        if let Literal::Promoted { ref mut index } = *literal {
            if let Some(p) = self.promoted_map.get(*index).cloned() {
                *index = p;
            }
        } else {
            self.super_literal(literal, loc);
        }
    }
}