rustc.git / src / librustc_codegen_ssa / mir / mod.rs
New upstream version 1.47.0+dfsg1
use crate::base;
use crate::traits::*;
use rustc_errors::ErrorReported;
use rustc_middle::mir;
use rustc_middle::mir::interpret::ErrorHandled;
use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt, TyAndLayout};
use rustc_middle::ty::{self, Instance, Ty, TypeFoldable};
use rustc_target::abi::call::{FnAbi, PassMode};
use rustc_target::abi::HasDataLayout;

use std::iter;

use rustc_index::bit_set::BitSet;
use rustc_index::vec::IndexVec;

use self::analyze::CleanupKind;
use self::debuginfo::{FunctionDebugContext, PerLocalVarDebugInfo};
use self::place::PlaceRef;
use rustc_middle::mir::traversal;

use self::operand::{OperandRef, OperandValue};

/// Master context for codegenning from MIR.
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    instance: Instance<'tcx>,

    mir: &'tcx mir::Body<'tcx>,

    debug_context: Option<FunctionDebugContext<Bx::DIScope>>,

    llfn: Bx::Function,

    cx: &'a Bx::CodegenCx,

    fn_abi: FnAbi<'tcx, Ty<'tcx>>,

    /// When unwinding is initiated, we have to store this personality
    /// value somewhere so that we can load it and re-use it in the
    /// resume instruction. The personality is (afaik) some kind of
    /// value used for C++ unwinding, which must filter by type: we
    /// don't really care about it very much. Anyway, this value
    /// contains an alloca into which the personality is stored and
    /// then later loaded when generating the DIVERGE_BLOCK.
    personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,

    /// A backend `BasicBlock` for each MIR `BasicBlock`.
    blocks: IndexVec<mir::BasicBlock, Bx::BasicBlock>,

    /// The funclet status of each basic block.
    cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,

    /// When targeting MSVC, this stores the cleanup info for each funclet
    /// BB. This is initialized as we compute the funclets' head block in RPO.
    funclets: IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,

    /// This stores the landing-pad block for a given BB, computed lazily on GNU
    /// and eagerly on MSVC.
    landing_pads: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

    /// Cached unreachable block.
    unreachable_block: Option<Bx::BasicBlock>,

    /// The location where each MIR arg/var/tmp/ret is stored. This is
    /// usually a `PlaceRef` representing an alloca, but not always:
    /// sometimes we can skip the alloca and just store the value
    /// directly using an `OperandRef`, which makes for tighter LLVM
    /// IR. The conditions for using an `OperandRef` are as follows:
    ///
    /// - the type of the local must be judged "immediate" by `is_llvm_immediate`
    /// - the operand must never be referenced indirectly
    ///     - we should not take its address using the `&` operator
    ///     - nor should it appear in a place path like `tmp.a`
    /// - the operand must be defined by an rvalue that can generate immediate
    ///   values
    ///
    /// Avoiding allocs can also be important for certain intrinsics,
    /// notably `expect`.
    locals: IndexVec<mir::Local, LocalRef<'tcx, Bx::Value>>,

    /// All `VarDebugInfo` from the MIR body, partitioned by `Local`.
    /// This is `None` if no variable debuginfo/names are needed.
    per_local_var_debug_info:
        Option<IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, Bx::DIVariable>>>>,

    /// Caller location propagated if this function has `#[track_caller]`.
    caller_location: Option<OperandRef<'tcx, Bx::Value>>,
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    pub fn monomorphize<T>(&self, value: &T) -> T
    where
        T: Copy + TypeFoldable<'tcx>,
    {
        debug!("monomorphize: self.instance={:?}", self.instance);
        if let Some(substs) = self.instance.substs_for_mir_body() {
            self.cx.tcx().subst_and_normalize_erasing_regions(
                substs,
                ty::ParamEnv::reveal_all(),
                &value,
            )
        } else {
            self.cx.tcx().normalize_erasing_regions(ty::ParamEnv::reveal_all(), *value)
        }
    }
}
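
// Callers generally run MIR-level types through `monomorphize` before asking
// the backend about them, e.g. `bx.layout_of(fx.monomorphize(&decl.ty))` in
// `codegen_mir`'s `allocate_local` below.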

enum LocalRef<'tcx, V> {
    Place(PlaceRef<'tcx, V>),
    /// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place).
    /// `*p` is the fat pointer that references the actual unsized place.
    /// Every time it is initialized, we have to reallocate the place
    /// and update the fat pointer. That's the reason why it is indirect.
    UnsizedPlace(PlaceRef<'tcx, V>),
    Operand(Option<OperandRef<'tcx, V>>),
}

impl<'a, 'tcx, V: CodegenObject> LocalRef<'tcx, V> {
    fn new_operand<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> LocalRef<'tcx, V> {
        if layout.is_zst() {
            // Zero-size temporaries aren't always initialized, which
            // doesn't matter because they don't contain data, but
            // we need something in the operand.
            LocalRef::Operand(Some(OperandRef::new_zst(bx, layout)))
        } else {
            LocalRef::Operand(None)
        }
    }
}
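
// A `LocalRef::Operand(None)` means the local's value has not been produced
// yet; it is filled in once the statement that defines the local is codegenned.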

///////////////////////////////////////////////////////////////////////////

pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    instance: Instance<'tcx>,
) {
    assert!(!instance.substs.needs_infer());

    let llfn = cx.get_fn(instance);

    let mir = cx.tcx().instance_mir(instance.def);

    let fn_abi = FnAbi::of_instance(cx, instance, &[]);
    debug!("fn_abi: {:?}", fn_abi);

    let debug_context = cx.create_function_debug_context(instance, &fn_abi, llfn, &mir);

    let mut bx = Bx::new_block(cx, llfn, "start");

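    // A function needs an exception-handling personality only if it contains
    // cleanup (unwind) blocks; the personality is what the unwinder consults
    // when transferring control to the landing pads / funclets created below.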
    if mir.basic_blocks().iter().any(|bb| bb.is_cleanup) {
        bx.set_personality_fn(cx.eh_personality());
    }

    bx.sideeffect();

    let cleanup_kinds = analyze::cleanup_kinds(&mir);
    // Allocate a `Block` for every basic block, except
    // the start block, if nothing loops back to it.
    let reentrant_start_block = !mir.predecessors()[mir::START_BLOCK].is_empty();
    let block_bxs: IndexVec<mir::BasicBlock, Bx::BasicBlock> = mir
        .basic_blocks()
        .indices()
        .map(|bb| {
            if bb == mir::START_BLOCK && !reentrant_start_block {
                bx.llbb()
            } else {
                bx.build_sibling_block(&format!("{:?}", bb)).llbb()
            }
        })
        .collect();

    let (landing_pads, funclets) = create_funclets(&mir, &mut bx, &cleanup_kinds, &block_bxs);
    let mut fx = FunctionCx {
        instance,
        mir,
        llfn,
        fn_abi,
        cx,
        personality_slot: None,
        blocks: block_bxs,
        unreachable_block: None,
        cleanup_kinds,
        landing_pads,
        funclets,
        locals: IndexVec::new(),
        debug_context,
        per_local_var_debug_info: None,
        caller_location: None,
    };

    fx.per_local_var_debug_info = fx.compute_per_local_var_debug_info();

    for const_ in &mir.required_consts {
        if let Err(err) = fx.eval_mir_constant(const_) {
            match err {
                // errored or at least linted
                ErrorHandled::Reported(ErrorReported) | ErrorHandled::Linted => {}
                ErrorHandled::TooGeneric => {
                    span_bug!(const_.span, "codegen encountered polymorphic constant: {:?}", err)
                }
            }
        }
    }

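    // Determine which locals actually need memory (an alloca): roughly, those
    // that can be referenced indirectly or otherwise can't be kept as an SSA
    // value. Everything else stays as an `OperandRef` (see the docs on the
    // `locals` field above).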
    let memory_locals = analyze::non_ssa_locals(&fx);

    // Allocate variable and temp allocas
    fx.locals = {
        let args = arg_local_refs(&mut bx, &mut fx, &memory_locals);

        let mut allocate_local = |local| {
            let decl = &mir.local_decls[local];
            let layout = bx.layout_of(fx.monomorphize(&decl.ty));
            assert!(!layout.ty.has_erasable_regions());

            if local == mir::RETURN_PLACE && fx.fn_abi.ret.is_indirect() {
                debug!("alloc: {:?} (return place) -> place", local);
                let llretptr = bx.get_param(0);
                return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
            }

            if memory_locals.contains(local) {
                debug!("alloc: {:?} -> place", local);
                if layout.is_unsized() {
                    LocalRef::UnsizedPlace(PlaceRef::alloca_unsized_indirect(&mut bx, layout))
                } else {
                    LocalRef::Place(PlaceRef::alloca(&mut bx, layout))
                }
            } else {
                debug!("alloc: {:?} -> operand", local);
                LocalRef::new_operand(&mut bx, layout)
            }
        };

        let retptr = allocate_local(mir::RETURN_PLACE);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };

    // Apply debuginfo to the newly allocated locals.
    fx.debug_introduce_locals(&mut bx);

    // Branch to the START block, if it's not the entry block.
    if reentrant_start_block {
        bx.br(fx.blocks[mir::START_BLOCK]);
    }

    let rpo = traversal::reverse_postorder(&mir);
    let mut visited = BitSet::new_empty(mir.basic_blocks().len());

    // Codegen the body of each block using reverse postorder
    for (bb, _) in rpo {
        visited.insert(bb.index());
        fx.codegen_block(bb);
    }

    // Remove blocks that haven't been visited, or have no
    // predecessors.
    for bb in mir.basic_blocks().indices() {
        // Unreachable block
        if !visited.contains(bb.index()) {
            debug!("codegen_mir: block {:?} was not visited", bb);
            unsafe {
                bx.delete_basic_block(fx.blocks[bb]);
            }
        }
    }
}

fn create_funclets<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    mir: &'tcx mir::Body<'tcx>,
    bx: &mut Bx,
    cleanup_kinds: &IndexVec<mir::BasicBlock, CleanupKind>,
    block_bxs: &IndexVec<mir::BasicBlock, Bx::BasicBlock>,
) -> (
    IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,
    IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,
) {
    block_bxs
        .iter_enumerated()
        .zip(cleanup_kinds)
        .map(|((bb, &llbb), cleanup_kind)| {
            match *cleanup_kind {
                CleanupKind::Funclet if base::wants_msvc_seh(bx.sess()) => {}
                _ => return (None, None),
            }

            let funclet;
            let ret_llbb;
            match mir[bb].terminator.as_ref().map(|t| &t.kind) {
                // This is a basic block that we're aborting the program for,
                // notably in an `extern` function. These basic blocks are inserted
                // so that we assert that `extern` functions do indeed not panic,
                // and if they do we abort the process.
                //
                // On MSVC these are tricky though (where we're doing funclets). If
                // we were to do a cleanuppad (like below) the normal functions like
                // `longjmp` would trigger the abort logic, terminating the
                // program. Instead we insert the equivalent of `catch(...)` for C++
                // which magically doesn't trigger when `longjmp` flies over this
                // frame.
                //
                // Lots more discussion can be found on #48251 but this codegen is
                // modeled after clang's for:
                //
                //      try {
                //          foo();
                //      } catch (...) {
                //          bar();
                //      }
                Some(&mir::TerminatorKind::Abort) => {
                    let mut cs_bx = bx.build_sibling_block(&format!("cs_funclet{:?}", bb));
                    let mut cp_bx = bx.build_sibling_block(&format!("cp_funclet{:?}", bb));
                    ret_llbb = cs_bx.llbb();

                    let cs = cs_bx.catch_switch(None, None, 1);
                    cs_bx.add_handler(cs, cp_bx.llbb());

                    // The "null" here is actually an RTTI type descriptor for the
                    // C++ personality function, but `catch (...)` has no type so
                    // it's null. The 64 here is actually a bitfield which
                    // represents that this is a catch-all block.
                    let null = bx.const_null(
                        bx.type_i8p_ext(bx.cx().data_layout().instruction_address_space),
                    );
                    let sixty_four = bx.const_i32(64);
                    funclet = cp_bx.catch_pad(cs, &[null, sixty_four, null]);
                    cp_bx.br(llbb);
                }
                _ => {
                    let mut cleanup_bx = bx.build_sibling_block(&format!("funclet_{:?}", bb));
                    ret_llbb = cleanup_bx.llbb();
                    funclet = cleanup_bx.cleanup_pad(None, &[]);
                    cleanup_bx.br(llbb);
                }
            };

            (Some(ret_llbb), Some(funclet))
        })
        .unzip()
}

/// Produces, for each argument, a `LocalRef` holding that argument's value.
/// Arguments that live in memory become indirect places; arguments that do
/// not need an alloca are returned as direct operands.
fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    fx: &mut FunctionCx<'a, 'tcx, Bx>,
    memory_locals: &BitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
    let mir = fx.mir;
    let mut idx = 0;
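    // If the return value is returned indirectly (sret), the pointer to the
    // return place occupies LLVM argument slot 0, so real arguments start at 1.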
    let mut llarg_idx = fx.fn_abi.ret.is_indirect() as usize;

    let args = mir
        .args_iter()
        .enumerate()
        .map(|(arg_index, local)| {
            let arg_decl = &mir.local_decls[local];

            if Some(local) == mir.spread_arg {
                // This argument (e.g., the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual LLVM function arguments.
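                //
                // Illustrative example (not from the original comment): for a
                // closure invoked via `extern "rust-call" fn call_once(self, args: (A, B))`,
                // the ABI passes `A` and `B` as separate arguments, while MIR
                // still sees a single tuple-typed `args` local.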

                let arg_ty = fx.monomorphize(&arg_decl.ty);
                let tupled_arg_tys = match arg_ty.kind {
                    ty::Tuple(ref tys) => tys,
                    _ => bug!("spread argument isn't a tuple?!"),
                };

                let place = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                for i in 0..tupled_arg_tys.len() {
                    let arg = &fx.fn_abi.args[idx];
                    idx += 1;
                    if arg.pad.is_some() {
                        llarg_idx += 1;
                    }
                    let pr_field = place.project_field(bx, i);
                    bx.store_fn_arg(arg, &mut llarg_idx, pr_field);
                }

                return LocalRef::Place(place);
            }

            if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
                let arg_ty = fx.monomorphize(&arg_decl.ty);

                let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                bx.va_start(va_list.llval);

                return LocalRef::Place(va_list);
            }

            let arg = &fx.fn_abi.args[idx];
            idx += 1;
            if arg.pad.is_some() {
                llarg_idx += 1;
            }

            if !memory_locals.contains(local) {
                // We don't have to cast or keep the argument in the alloca.
                // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
                // of putting everything in allocas just so we can use llvm.dbg.declare.
                let local = |op| LocalRef::Operand(Some(op));
                match arg.mode {
                    PassMode::Ignore => {
                        return local(OperandRef::new_zst(bx, arg.layout));
                    }
                    PassMode::Direct(_) => {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        return local(OperandRef::from_immediate_or_packed_pair(
                            bx, llarg, arg.layout,
                        ));
                    }
                    PassMode::Pair(..) => {
                        let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1));
                        llarg_idx += 2;

                        return local(OperandRef {
                            val: OperandValue::Pair(a, b),
                            layout: arg.layout,
                        });
                    }
                    _ => {}
                }
            }

            if arg.is_sized_indirect() {
                // Don't copy an indirect argument to an alloca; the caller
                // already put it in a temporary alloca and gave it up.
                // FIXME: lifetimes
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout))
            } else if arg.is_unsized_indirect() {
                // As the storage for the indirect argument lives for the
                // whole function call, we just copy the fat pointer.
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let llextra = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let indirect_operand = OperandValue::Pair(llarg, llextra);

                let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout);
                indirect_operand.store(bx, tmp);
                LocalRef::UnsizedPlace(tmp)
            } else {
                let tmp = PlaceRef::alloca(bx, arg.layout);
                bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                LocalRef::Place(tmp)
            }
        })
        .collect::<Vec<_>>();

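    // `#[track_caller]` functions receive the caller's `Location` as one extra,
    // ABI-only trailing argument (checked by the assert below); stash it so the
    // body can use it wherever the caller's location is needed.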
    if fx.instance.def.requires_caller_location(bx.tcx()) {
        assert_eq!(
            fx.fn_abi.args.len(),
            args.len() + 1,
            "#[track_caller] fn's must have 1 more argument in their ABI than in their MIR",
        );

        let arg = fx.fn_abi.args.last().unwrap();
        match arg.mode {
            PassMode::Direct(_) => (),
            _ => bug!("caller location must be PassMode::Direct, found {:?}", arg.mode),
        }

        fx.caller_location = Some(OperandRef {
            val: OperandValue::Immediate(bx.get_param(llarg_idx)),
            layout: arg.layout,
        });
    }

    args
}

mod analyze;
mod block;
pub mod constant;
pub mod coverageinfo;
pub mod debuginfo;
pub mod operand;
pub mod place;
mod rvalue;
mod statement;