// compiler/rustc_codegen_ssa/src/mir/mod.rs (upstream rustc 1.52.0~beta.3)
use crate::base;
use crate::traits::*;
use rustc_errors::ErrorReported;
use rustc_middle::mir;
use rustc_middle::mir::interpret::ErrorHandled;
use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt, TyAndLayout};
use rustc_middle::ty::{self, Instance, Ty, TypeFoldable};
use rustc_target::abi::call::{FnAbi, PassMode};
use rustc_target::abi::HasDataLayout;

use std::iter;

use rustc_index::bit_set::BitSet;
use rustc_index::vec::IndexVec;

use self::analyze::CleanupKind;
use self::debuginfo::{FunctionDebugContext, PerLocalVarDebugInfo};
use self::place::PlaceRef;
use rustc_middle::mir::traversal;

use self::operand::{OperandRef, OperandValue};
/// Master context for codegenning from MIR.
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    instance: Instance<'tcx>,

    mir: &'tcx mir::Body<'tcx>,

    debug_context: Option<FunctionDebugContext<Bx::DIScope, Bx::DILocation>>,

    llfn: Bx::Function,

    cx: &'a Bx::CodegenCx,

    fn_abi: FnAbi<'tcx, Ty<'tcx>>,

    /// When unwinding is initiated, we have to store this personality
    /// value somewhere so that we can load it and re-use it in the
    /// resume instruction. The personality is (afaik) some kind of
    /// value used for C++ unwinding, which must filter by type: we
    /// don't really care about it very much. Anyway, this value
    /// contains an alloca into which the personality is stored and
    /// then later loaded when generating the DIVERGE_BLOCK.
    personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,

    /// A `Block` for each MIR `BasicBlock`
    blocks: IndexVec<mir::BasicBlock, Bx::BasicBlock>,

    /// The funclet status of each basic block
    cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,

    /// When targeting MSVC, this stores the cleanup info for each funclet
    /// BB. This is initialized as we compute the funclets' head block in RPO.
    funclets: IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,

    /// This stores the landing-pad block for a given BB, computed lazily on GNU
    /// and eagerly on MSVC.
    landing_pads: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

    /// Cached unreachable block
    unreachable_block: Option<Bx::BasicBlock>,
    /// The location where each MIR arg/var/tmp/ret is stored. This is
    /// usually a `PlaceRef` representing an alloca, but not always:
    /// sometimes we can skip the alloca and just store the value
    /// directly using an `OperandRef`, which makes for tighter LLVM
    /// IR. The conditions for using an `OperandRef` are as follows:
    ///
    /// - the type of the local must be judged "immediate" by `is_llvm_immediate`
    /// - the operand must never be referenced indirectly
    ///     - we should not take its address using the `&` operator
    ///     - nor should it appear in a place path like `tmp.a`
    /// - the operand must be defined by an rvalue that can generate immediate
    ///   values
    ///
    /// Avoiding allocs can also be important for certain intrinsics,
    /// notably `expect`.
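    ///
    /// For example (an illustrative sketch, not actual compiler output):
    ///
    /// ```ignore (illustrative)
    /// fn mid(a: i32, b: i32) -> i32 {
    ///     let m = (a + b) / 2; // `m` is an immediate scalar and its
    ///     m                    // address is never taken, so it can be
    /// }                        // an `OperandRef` with no alloca at all
    /// ```
    ///
    /// By contrast, adding `let r = &m;` would force `m` into memory.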
    locals: IndexVec<mir::Local, LocalRef<'tcx, Bx::Value>>,

    /// All `VarDebugInfo` from the MIR body, partitioned by `Local`.
    /// This is `None` if no variable debuginfo/names are needed.
    per_local_var_debug_info:
        Option<IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, Bx::DIVariable>>>>,

    /// Caller location propagated if this function has `#[track_caller]`.
    caller_location: Option<OperandRef<'tcx, Bx::Value>>,
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    pub fn monomorphize<T>(&self, value: T) -> T
    where
        T: Copy + TypeFoldable<'tcx>,
    {
        debug!("monomorphize: self.instance={:?}", self.instance);
        self.instance.subst_mir_and_normalize_erasing_regions(
            self.cx.tcx(),
            ty::ParamEnv::reveal_all(),
            value,
        )
    }
}
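// What `monomorphize` accomplishes, as a hypothetical sketch (the names
// below are invented for illustration): while codegenning the instance
// `id::<u32>`, a type taken from the generic MIR, such as `Option<T>`,
// is substituted and normalized to the concrete `Option<u32>`, with all
// regions erased, before it ever reaches a layout query.
//
//     fn id<T>(x: T) -> T { x }    // generic MIR mentions `T`
//     let y = id::<u32>(7);        // this instance substitutes T = u32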

enum LocalRef<'tcx, V> {
    Place(PlaceRef<'tcx, V>),
    /// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place).
    /// `*p` is the fat pointer that references the actual unsized place.
    /// Every time it is initialized, we have to reallocate the place
    /// and update the fat pointer. That's the reason why it is indirect.
    UnsizedPlace(PlaceRef<'tcx, V>),
    Operand(Option<OperandRef<'tcx, V>>),
}
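// An illustrative (and hypothetical, feature-gated) example of a local
// that needs `LocalRef::UnsizedPlace`: with `#![feature(unsized_locals)]`,
//
//     let f: dyn FnOnce() = *boxed_closure;   // `f` has no statically
//     f();                                    // known size
//
// `f` cannot live in a fixed-size alloca, so we keep a thin pointer slot
// and re-point it at freshly allocated storage on each initialization.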

impl<'a, 'tcx, V: CodegenObject> LocalRef<'tcx, V> {
    fn new_operand<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> LocalRef<'tcx, V> {
        if layout.is_zst() {
            // Zero-size temporaries aren't always initialized, which
            // doesn't matter because they don't contain data, but
            // we need something in the operand.
            LocalRef::Operand(Some(OperandRef::new_zst(bx, layout)))
        } else {
            LocalRef::Operand(None)
        }
    }
}
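// For example (illustrative): locals of type `()` or of a field-less
// struct occupy zero bytes, so `new_operand` hands back a ready-made
// `OperandRef::new_zst` instead of `None`; reads of such locals never
// touch memory.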

///////////////////////////////////////////////////////////////////////////

pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    instance: Instance<'tcx>,
) {
    assert!(!instance.substs.needs_infer());

    let llfn = cx.get_fn(instance);

    let mir = cx.tcx().instance_mir(instance.def);

    let fn_abi = FnAbi::of_instance(cx, instance, &[]);
    debug!("fn_abi: {:?}", fn_abi);

    let debug_context = cx.create_function_debug_context(instance, &fn_abi, llfn, &mir);

    let mut bx = Bx::new_block(cx, llfn, "start");

    if mir.basic_blocks().iter().any(|bb| bb.is_cleanup) {
        bx.set_personality_fn(cx.eh_personality());
    }

    let cleanup_kinds = analyze::cleanup_kinds(&mir);
    // Allocate a `Block` for every basic block, except
    // the start block, if nothing loops back to it.
    let reentrant_start_block = !mir.predecessors()[mir::START_BLOCK].is_empty();
    let block_bxs: IndexVec<mir::BasicBlock, Bx::BasicBlock> = mir
        .basic_blocks()
        .indices()
        .map(|bb| {
            if bb == mir::START_BLOCK && !reentrant_start_block {
                bx.llbb()
            } else {
                bx.build_sibling_block(&format!("{:?}", bb)).llbb()
            }
        })
        .collect();

    let (landing_pads, funclets) = create_funclets(&mir, &mut bx, &cleanup_kinds, &block_bxs);
    let mut fx = FunctionCx {
        instance,
        mir,
        llfn,
        fn_abi,
        cx,
        personality_slot: None,
        blocks: block_bxs,
        unreachable_block: None,
        cleanup_kinds,
        landing_pads,
        funclets,
        locals: IndexVec::new(),
        debug_context,
        per_local_var_debug_info: None,
        caller_location: None,
    };

    fx.per_local_var_debug_info = fx.compute_per_local_var_debug_info(&mut bx);

    // Evaluate all required consts; codegen later assumes that CTFE will never fail.
    let mut all_consts_ok = true;
    for const_ in &mir.required_consts {
        if let Err(err) = fx.eval_mir_constant(const_) {
            all_consts_ok = false;
            match err {
                // errored or at least linted
                ErrorHandled::Reported(ErrorReported) | ErrorHandled::Linted => {}
                ErrorHandled::TooGeneric => {
                    span_bug!(const_.span, "codegen encountered polymorphic constant: {:?}", err)
                }
            }
        }
    }
    if !all_consts_ok {
        // We leave the IR in some half-built state here, and rely on this code not even being
        // submitted to LLVM once an error was raised.
        return;
    }
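
    // An illustrative example (not from this file) of a required const that
    // only fails once monomorphized, which is exactly what the loop above
    // catches before any IR is emitted:
    //
    //     struct Check<T>(T);
    //     impl<T> Check<T> {
    //         // Underflows, and therefore fails CTFE, whenever `T` is
    //         // 8 bytes or larger:
    //         const ASSERT: usize = 0 - (std::mem::size_of::<T>() >= 8) as usize;
    //     }
    //     // A body mentioning `Check::<u64>::ASSERT` takes the error path.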

    let memory_locals = analyze::non_ssa_locals(&fx);

    // Allocate variable and temp allocas
    fx.locals = {
        let args = arg_local_refs(&mut bx, &mut fx, &memory_locals);

        let mut allocate_local = |local| {
            let decl = &mir.local_decls[local];
            let layout = bx.layout_of(fx.monomorphize(decl.ty));
            assert!(!layout.ty.has_erasable_regions());

            if local == mir::RETURN_PLACE && fx.fn_abi.ret.is_indirect() {
                debug!("alloc: {:?} (return place) -> place", local);
                let llretptr = bx.get_param(0);
                return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
            }

            if memory_locals.contains(local) {
                debug!("alloc: {:?} -> place", local);
                if layout.is_unsized() {
                    LocalRef::UnsizedPlace(PlaceRef::alloca_unsized_indirect(&mut bx, layout))
                } else {
                    LocalRef::Place(PlaceRef::alloca(&mut bx, layout))
                }
            } else {
                debug!("alloc: {:?} -> operand", local);
                LocalRef::new_operand(&mut bx, layout)
            }
        };

        let retptr = allocate_local(mir::RETURN_PLACE);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };
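
    // An illustrative sketch (not actual output) of the decision above:
    //
    //     let a = 1i32;    // never borrowed -> `LocalRef::Operand`, pure SSA
    //     let b = 2i32;
    //     let r = &b;      // address escapes -> `memory_locals` -> alloca
    //
    // `analyze::non_ssa_locals` flagged `b` because its address is taken,
    // so only `b` receives stack storage here.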

    // Apply debuginfo to the newly allocated locals.
    fx.debug_introduce_locals(&mut bx);

    // Branch to the START block, if it's not the entry block.
    if reentrant_start_block {
        bx.br(fx.blocks[mir::START_BLOCK]);
    }

    let rpo = traversal::reverse_postorder(&mir);
    let mut visited = BitSet::new_empty(mir.basic_blocks().len());

    // Codegen the body of each block using reverse postorder
    for (bb, _) in rpo {
        visited.insert(bb.index());
        fx.codegen_block(bb);
    }

    // Remove blocks that were not visited above; since the traversal is a
    // reverse postorder from the start block, these are unreachable.
    for bb in mir.basic_blocks().indices() {
        // Unreachable block
        if !visited.contains(bb.index()) {
            debug!("codegen_mir: block {:?} was not visited", bb);
            unsafe {
                bx.delete_basic_block(fx.blocks[bb]);
            }
        }
    }
}

fn create_funclets<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    mir: &'tcx mir::Body<'tcx>,
    bx: &mut Bx,
    cleanup_kinds: &IndexVec<mir::BasicBlock, CleanupKind>,
    block_bxs: &IndexVec<mir::BasicBlock, Bx::BasicBlock>,
) -> (
    IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,
    IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,
) {
    block_bxs
        .iter_enumerated()
        .zip(cleanup_kinds)
        .map(|((bb, &llbb), cleanup_kind)| {
            match *cleanup_kind {
                CleanupKind::Funclet if base::wants_msvc_seh(bx.sess()) => {}
                _ => return (None, None),
            }

            let funclet;
            let ret_llbb;
            match mir[bb].terminator.as_ref().map(|t| &t.kind) {
                // This is a basic block that we're aborting the program for,
                // notably in an `extern` function. These basic blocks are inserted
                // so that we assert that `extern` functions do indeed not panic,
                // and if they do we abort the process.
                //
                // On MSVC these are tricky though (where we're doing funclets). If
                // we were to do a cleanuppad (like below) the normal functions like
                // `longjmp` would trigger the abort logic, terminating the
                // program. Instead we insert the equivalent of `catch(...)` for C++
                // which magically doesn't trigger when `longjmp` flies over this
                // frame.
                //
                // Lots more discussion can be found on #48251 but this codegen is
                // modeled after clang's for:
                //
                //      try {
                //          foo();
                //      } catch (...) {
                //          bar();
                //      }
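                //
                // A rough sketch of the LLVM IR shape this arm aims to
                // produce (hand-written here for illustration, assuming the
                // MSVC personality; block names follow the format strings
                // below):
                //
                //      cs_funclet0:
                //        %cs = catchswitch within none [label %cp_funclet0]
                //                  unwind to caller
                //      cp_funclet0:
                //        %cp = catchpad within %cs [i8* null, i32 64, i8* null]
                //        br label %bb0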
                Some(&mir::TerminatorKind::Abort) => {
                    let mut cs_bx = bx.build_sibling_block(&format!("cs_funclet{:?}", bb));
                    let mut cp_bx = bx.build_sibling_block(&format!("cp_funclet{:?}", bb));
                    ret_llbb = cs_bx.llbb();

                    let cs = cs_bx.catch_switch(None, None, 1);
                    cs_bx.add_handler(cs, cp_bx.llbb());

                    // The "null" here is actually a RTTI type descriptor for the
                    // C++ personality function, but `catch (...)` has no type so
                    // it's null. The 64 here is actually a bitfield which
                    // represents that this is a catch-all block.
                    let null = bx.const_null(
                        bx.type_i8p_ext(bx.cx().data_layout().instruction_address_space),
                    );
                    let sixty_four = bx.const_i32(64);
                    funclet = cp_bx.catch_pad(cs, &[null, sixty_four, null]);
                    cp_bx.br(llbb);
                }
                _ => {
                    let mut cleanup_bx = bx.build_sibling_block(&format!("funclet_{:?}", bb));
                    ret_llbb = cleanup_bx.llbb();
                    funclet = cleanup_bx.cleanup_pad(None, &[]);
                    cleanup_bx.br(llbb);
                }
            };

            (Some(ret_llbb), Some(funclet))
        })
        .unzip()
}

/// Produces, for each argument, a `Value` pointing at the
/// argument's value. As arguments are places, these are always
/// indirect.
fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    fx: &mut FunctionCx<'a, 'tcx, Bx>,
    memory_locals: &BitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
    let mir = fx.mir;
    let mut idx = 0;
    let mut llarg_idx = fx.fn_abi.ret.is_indirect() as usize;

    let args = mir
        .args_iter()
        .enumerate()
        .map(|(arg_index, local)| {
            let arg_decl = &mir.local_decls[local];

            if Some(local) == mir.spread_arg {
                // This argument (e.g., the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual LLVM function arguments.
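                //
                // For example (illustrative, not taken from this file): a
                // closure invoked as `FnMut::call_mut(f, (a, b))` has one
                // MIR argument holding the `(a, b)` tuple, but two distinct
                // scalar parameters at the LLVM level; the loop below stores
                // each of them into one field of a single tuple alloca.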

                let arg_ty = fx.monomorphize(arg_decl.ty);
                let tupled_arg_tys = match arg_ty.kind() {
                    ty::Tuple(tys) => tys,
                    _ => bug!("spread argument isn't a tuple?!"),
                };

                let place = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                for i in 0..tupled_arg_tys.len() {
                    let arg = &fx.fn_abi.args[idx];
                    idx += 1;
                    if arg.pad.is_some() {
                        llarg_idx += 1;
                    }
                    let pr_field = place.project_field(bx, i);
                    bx.store_fn_arg(arg, &mut llarg_idx, pr_field);
                }

                return LocalRef::Place(place);
            }

            if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
                let arg_ty = fx.monomorphize(arg_decl.ty);

                let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                bx.va_start(va_list.llval);

                return LocalRef::Place(va_list);
            }

            let arg = &fx.fn_abi.args[idx];
            idx += 1;
            if arg.pad.is_some() {
                llarg_idx += 1;
            }

            if !memory_locals.contains(local) {
                // We don't have to cast or keep the argument in the alloca.
                // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
                // of putting everything in allocas just so we can use llvm.dbg.declare.
                let local = |op| LocalRef::Operand(Some(op));
                match arg.mode {
                    PassMode::Ignore => {
                        return local(OperandRef::new_zst(bx, arg.layout));
                    }
                    PassMode::Direct(_) => {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        return local(OperandRef::from_immediate_or_packed_pair(
                            bx, llarg, arg.layout,
                        ));
                    }
                    PassMode::Pair(..) => {
                        let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1));
                        llarg_idx += 2;

                        return local(OperandRef {
                            val: OperandValue::Pair(a, b),
                            layout: arg.layout,
                        });
                    }
                    _ => {}
                }
            }

            if arg.is_sized_indirect() {
                // Don't copy an indirect argument to an alloca, the caller
                // already put it in a temporary alloca and gave it up.
                // FIXME: lifetimes
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout))
            } else if arg.is_unsized_indirect() {
                // As the storage for the indirect argument lives during
                // the whole function call, we just copy the fat pointer.
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let llextra = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let indirect_operand = OperandValue::Pair(llarg, llextra);

                let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout);
                indirect_operand.store(bx, tmp);
                LocalRef::UnsizedPlace(tmp)
            } else {
                let tmp = PlaceRef::alloca(bx, arg.layout);
                bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                LocalRef::Place(tmp)
            }
        })
        .collect::<Vec<_>>();

    if fx.instance.def.requires_caller_location(bx.tcx()) {
        assert_eq!(
            fx.fn_abi.args.len(),
            args.len() + 1,
            "#[track_caller] fn's must have 1 more argument in their ABI than in their MIR",
        );

        let arg = fx.fn_abi.args.last().unwrap();
        match arg.mode {
            PassMode::Direct(_) => (),
            _ => bug!("caller location must be PassMode::Direct, found {:?}", arg.mode),
        }

        fx.caller_location = Some(OperandRef {
            val: OperandValue::Immediate(bx.get_param(llarg_idx)),
            layout: arg.layout,
        });
    }
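
    // An illustrative example (not from this file) of what feeds this path:
    //
    //     #[track_caller]
    //     fn check(ok: bool) {
    //         if !ok {
    //             panic!("failed at {}", std::panic::Location::caller());
    //         }
    //     }
    //
    // Every caller passes a hidden `&'static Location` as the final ABI
    // argument; that operand is what we capture into `fx.caller_location`.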

    args
}

mod analyze;
mod block;
pub mod constant;
pub mod coverageinfo;
pub mod debuginfo;
mod intrinsic;
pub mod operand;
pub mod place;
mod rvalue;
mod statement;