use crate::base;
use crate::traits::*;
use rustc_middle::mir;
use rustc_middle::mir::interpret::ErrorHandled;
use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, TyAndLayout};
use rustc_middle::ty::{self, Instance, Ty, TyCtxt, TypeFoldable, TypeVisitableExt};
use rustc_target::abi::call::{FnAbi, PassMode};

use std::iter;

use rustc_index::bit_set::BitSet;
use rustc_index::vec::IndexVec;

use self::debuginfo::{FunctionDebugContext, PerLocalVarDebugInfo};
use self::place::PlaceRef;
use rustc_middle::mir::traversal;

use self::operand::{OperandRef, OperandValue};

// Used for tracking the state of generated basic blocks.
enum CachedLlbb<T> {
    /// Nothing created yet.
    None,

    /// Has been created.
    Some(T),

    /// Nothing created yet, and nothing should be.
    Skip,
}

/// Master context for codegenning from MIR.
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    instance: Instance<'tcx>,

    mir: &'tcx mir::Body<'tcx>,

    debug_context: Option<FunctionDebugContext<Bx::DIScope, Bx::DILocation>>,

    llfn: Bx::Function,

    cx: &'a Bx::CodegenCx,

    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,

    /// When unwinding is initiated, we have to store this personality
    /// value somewhere so that we can load it and re-use it in the
    /// resume instruction. The personality is (afaik) some kind of
    /// value used for C++ unwinding, which must filter by type: we
    /// don't really care about it very much. Anyway, this value
    /// contains an alloca into which the personality is stored and
    /// then later loaded when generating the DIVERGE_BLOCK.
    personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,

    /// A backend `BasicBlock` for each MIR `BasicBlock`, created lazily
    /// as-needed (e.g. RPO reaching it or another block branching to it).
    // FIXME(eddyb) rename `llbbs` and other `ll`-prefixed things to use a
    // more backend-agnostic prefix such as `cg` (i.e. this would be `cgbbs`).
    cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>>,

    /// The funclet status of each basic block
    cleanup_kinds: Option<IndexVec<mir::BasicBlock, analyze::CleanupKind>>,

    /// When targeting MSVC, this stores the cleanup info for each funclet BB.
    /// This is initialized at the same time as the `landing_pads` entry for the
    /// funclets' head block, i.e. when needed by an unwind / `cleanup_ret` edge.
    funclets: IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,

    /// This stores the cached landing/cleanup pad block for a given BB.
    // FIXME(eddyb) rename this to `eh_pads`.
    landing_pads: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

    /// Cached unreachable block
    unreachable_block: Option<Bx::BasicBlock>,

    /// Cached terminate upon unwinding block
    terminate_block: Option<Bx::BasicBlock>,

    /// The location where each MIR arg/var/tmp/ret is stored. This is
    /// usually a `PlaceRef` representing an alloca, but not always:
    /// sometimes we can skip the alloca and just store the value
    /// directly using an `OperandRef`, which makes for tighter LLVM
    /// IR. The conditions for using an `OperandRef` are as follows:
    ///
    /// - the type of the local must be judged "immediate" by `is_llvm_immediate`
    /// - the operand must never be referenced indirectly
    ///     - we should not take its address using the `&` operator
    ///     - nor should it appear in a place path like `tmp.a`
    /// - the operand must be defined by an rvalue that can generate immediate
    ///   values
    ///
    /// Avoiding allocs can also be important for certain intrinsics,
    /// notably `expect`.
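    ///
    /// For example, a simple `let x = 5_u32;` can leave `x` as an immediate
    /// `OperandRef`, but if the body also contains `let y = &x;`, taking the
    /// address forces `x` into an alloca-backed `PlaceRef` instead.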
    locals: IndexVec<mir::Local, LocalRef<'tcx, Bx::Value>>,

    /// All `VarDebugInfo` from the MIR body, partitioned by `Local`.
    /// This is `None` if no variable debuginfo/names are needed.
    per_local_var_debug_info:
        Option<IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, Bx::DIVariable>>>>,

    /// Caller location propagated if this function has `#[track_caller]`.
    caller_location: Option<OperandRef<'tcx, Bx::Value>>,
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
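    /// Substitutes the generic parameters of `self.instance` into `value` and
    /// normalizes the result, erasing regions along the way. Codegen runs
    /// with `ParamEnv::reveal_all`, so all types and constants are expected
    /// to be fully concrete here.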
    pub fn monomorphize<T>(&self, value: T) -> T
    where
        T: Copy + TypeFoldable<TyCtxt<'tcx>>,
    {
        debug!("monomorphize: self.instance={:?}", self.instance);
        self.instance.subst_mir_and_normalize_erasing_regions(
            self.cx.tcx(),
            ty::ParamEnv::reveal_all(),
            value,
        )
    }
}

enum LocalRef<'tcx, V> {
    Place(PlaceRef<'tcx, V>),
    /// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place).
    /// `*p` is the fat pointer that references the actual unsized place.
    /// Every time it is initialized, we have to reallocate the place
    /// and update the fat pointer. That's the reason why it is indirect.
    UnsizedPlace(PlaceRef<'tcx, V>),
    /// The backend [`OperandValue`] has already been generated.
    Operand(OperandRef<'tcx, V>),
    /// Will be a `Self::Operand` once we get to its definition.
    PendingOperand,
}

impl<'a, 'tcx, V: CodegenObject> LocalRef<'tcx, V> {
    fn new_operand<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> LocalRef<'tcx, V> {
        if layout.is_zst() {
            // Zero-size temporaries aren't always initialized, which
            // doesn't matter because they don't contain data, but
            // we need something in the operand.
            LocalRef::Operand(OperandRef::new_zst(bx, layout))
        } else {
            LocalRef::PendingOperand
        }
    }
}

///////////////////////////////////////////////////////////////////////////

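/// Generates backend code for the body of `instance`: builds the
/// `FunctionCx`, allocates backing storage for the locals, then drives
/// per-block codegen in reverse postorder.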
#[instrument(level = "debug", skip(cx))]
pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    instance: Instance<'tcx>,
) {
    assert!(!instance.substs.needs_infer());

    let llfn = cx.get_fn(instance);

    let mir = cx.tcx().instance_mir(instance.def);

    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
    debug!("fn_abi: {:?}", fn_abi);

    let debug_context = cx.create_function_debug_context(instance, &fn_abi, llfn, &mir);

    let start_llbb = Bx::append_block(cx, llfn, "start");
    let mut start_bx = Bx::build(cx, start_llbb);

    if mir.basic_blocks.iter().any(|bb| {
        bb.is_cleanup || matches!(bb.terminator().unwind(), Some(mir::UnwindAction::Terminate))
    }) {
        start_bx.set_personality_fn(cx.eh_personality());
    }

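    // Funclet-based EH (MSVC SEH) needs to know which funclet each block
    // belongs to; targets using landing pads skip this analysis entirely.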
    let cleanup_kinds = base::wants_msvc_seh(cx.tcx().sess).then(|| analyze::cleanup_kinds(&mir));

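    // Reserve a backend block slot per MIR block; only the entry block is
    // materialized eagerly, the rest are created lazily on first use.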
    let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> =
        mir.basic_blocks
            .indices()
            .map(|bb| {
                if bb == mir::START_BLOCK { CachedLlbb::Some(start_llbb) } else { CachedLlbb::None }
            })
            .collect();

    let mut fx = FunctionCx {
        instance,
        mir,
        llfn,
        fn_abi,
        cx,
        personality_slot: None,
        cached_llbbs,
        unreachable_block: None,
        terminate_block: None,
        cleanup_kinds,
        landing_pads: IndexVec::from_elem(None, &mir.basic_blocks),
        funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks.len()),
        locals: IndexVec::new(),
        debug_context,
        per_local_var_debug_info: None,
        caller_location: None,
    };

    fx.per_local_var_debug_info = fx.compute_per_local_var_debug_info(&mut start_bx);

    // Evaluate all required consts; codegen later assumes that CTFE will never fail.
    let mut all_consts_ok = true;
    for const_ in &mir.required_consts {
        if let Err(err) = fx.eval_mir_constant(const_) {
            all_consts_ok = false;
            match err {
                // errored or at least linted
                ErrorHandled::Reported(_) => {}
                ErrorHandled::TooGeneric => {
                    span_bug!(const_.span, "codegen encountered polymorphic constant: {:?}", err)
                }
            }
        }
    }
    if !all_consts_ok {
        // We leave the IR in some half-built state here, and rely on this code not even being
        // submitted to LLVM once an error was raised.
        return;
    }

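    // Locals whose address is taken, or that otherwise can't stay in SSA
    // form, must be backed by memory; everything else remains an operand.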
    let memory_locals = analyze::non_ssa_locals(&fx);

    // Allocate variable and temp allocas
    fx.locals = {
        let args = arg_local_refs(&mut start_bx, &mut fx, &memory_locals);

        let mut allocate_local = |local| {
            let decl = &mir.local_decls[local];
            let layout = start_bx.layout_of(fx.monomorphize(decl.ty));
            assert!(!layout.ty.has_erasable_regions());

            if local == mir::RETURN_PLACE && fx.fn_abi.ret.is_indirect() {
                debug!("alloc: {:?} (return place) -> place", local);
                let llretptr = start_bx.get_param(0);
                return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
            }

            if memory_locals.contains(local) {
                debug!("alloc: {:?} -> place", local);
                if layout.is_unsized() {
                    LocalRef::UnsizedPlace(PlaceRef::alloca_unsized_indirect(&mut start_bx, layout))
                } else {
                    LocalRef::Place(PlaceRef::alloca(&mut start_bx, layout))
                }
            } else {
                debug!("alloc: {:?} -> operand", local);
                LocalRef::new_operand(&mut start_bx, layout)
            }
        };

        let retptr = allocate_local(mir::RETURN_PLACE);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };

    // Apply debuginfo to the newly allocated locals.
    fx.debug_introduce_locals(&mut start_bx);

    // The builders will be created separately for each basic block at `codegen_block`.
    // So drop the builder of `start_llbb` to avoid having two at the same time.
    drop(start_bx);

    // Codegen the body of each block using reverse postorder
    for (bb, _) in traversal::reverse_postorder(&mir) {
        fx.codegen_block(bb);
    }
}

/// Produces, for each argument, a `LocalRef` holding the argument's value:
/// an `OperandRef` for arguments that can stay in SSA form, or a `PlaceRef`
/// (usually backed by an alloca) for those that must live in memory.
fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    fx: &mut FunctionCx<'a, 'tcx, Bx>,
    memory_locals: &BitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
    let mir = fx.mir;
    let mut idx = 0;
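    // `llarg_idx` indexes backend-level parameters: when the return value is
    // passed indirectly, parameter 0 is the return pointer, so the real
    // arguments start at 1.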
    let mut llarg_idx = fx.fn_abi.ret.is_indirect() as usize;

    let mut num_untupled = None;

    let args = mir
        .args_iter()
        .enumerate()
        .map(|(arg_index, local)| {
            let arg_decl = &mir.local_decls[local];

            if Some(local) == mir.spread_arg {
                // This argument (e.g., the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual LLVM function arguments.

                let arg_ty = fx.monomorphize(arg_decl.ty);
                let ty::Tuple(tupled_arg_tys) = arg_ty.kind() else {
                    bug!("spread argument isn't a tuple?!");
                };

                let place = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                for i in 0..tupled_arg_tys.len() {
                    let arg = &fx.fn_abi.args[idx];
                    idx += 1;
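                    // A `PassMode::Cast(_, true)` argument carries an extra
                    // padding parameter at the backend level; skip past it.
                    // (The same pattern appears again below.)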
                    if let PassMode::Cast(_, true) = arg.mode {
                        llarg_idx += 1;
                    }
                    let pr_field = place.project_field(bx, i);
                    bx.store_fn_arg(arg, &mut llarg_idx, pr_field);
                }
                assert_eq!(
                    None,
                    num_untupled.replace(tupled_arg_tys.len()),
                    "Replaced existing num_untupled"
                );

                return LocalRef::Place(place);
            }

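            // A C-variadic function has a trailing `VaList` pseudo-argument
            // with no ordinary backend parameter of its own: back it with an
            // alloca and initialize it via `va_start`.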
            if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
                let arg_ty = fx.monomorphize(arg_decl.ty);

                let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                bx.va_start(va_list.llval);

                return LocalRef::Place(va_list);
            }

            let arg = &fx.fn_abi.args[idx];
            idx += 1;
            if let PassMode::Cast(_, true) = arg.mode {
                llarg_idx += 1;
            }

            if !memory_locals.contains(local) {
                // We don't have to cast or keep the argument in the alloca.
                // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
                // of putting everything in allocas just so we can use llvm.dbg.declare.
                let local = |op| LocalRef::Operand(op);
                match arg.mode {
                    PassMode::Ignore => {
                        return local(OperandRef::new_zst(bx, arg.layout));
                    }
                    PassMode::Direct(_) => {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        return local(OperandRef::from_immediate_or_packed_pair(
                            bx, llarg, arg.layout,
                        ));
                    }
                    PassMode::Pair(..) => {
                        let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1));
                        llarg_idx += 2;

                        return local(OperandRef {
                            val: OperandValue::Pair(a, b),
                            layout: arg.layout,
                        });
                    }
                    _ => {}
                }
            }

            if arg.is_sized_indirect() {
                // Don't copy an indirect argument to an alloca, the caller
                // already put it in a temporary alloca and gave it up.
                // FIXME: lifetimes
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout))
            } else if arg.is_unsized_indirect() {
                // As the storage for the indirect argument lives during
                // the whole function call, we just copy the fat pointer.
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let llextra = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let indirect_operand = OperandValue::Pair(llarg, llextra);

                let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout);
                indirect_operand.store(bx, tmp);
                LocalRef::UnsizedPlace(tmp)
            } else {
                let tmp = PlaceRef::alloca(bx, arg.layout);
                bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                LocalRef::Place(tmp)
            }
        })
        .collect::<Vec<_>>();

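    // `#[track_caller]` functions receive a hidden trailing caller-location
    // argument; capture it so it can be forwarded to further calls.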
    if fx.instance.def.requires_caller_location(bx.tcx()) {
        let mir_args = if let Some(num_untupled) = num_untupled {
            // Subtract off the tupled argument that gets 'expanded'
            args.len() - 1 + num_untupled
        } else {
            args.len()
        };
        assert_eq!(
            fx.fn_abi.args.len(),
            mir_args + 1,
            "#[track_caller] instance {:?} must have 1 more argument in its ABI than in its MIR",
            fx.instance
        );

        let arg = fx.fn_abi.args.last().unwrap();
        match arg.mode {
            PassMode::Direct(_) => (),
            _ => bug!("caller location must be PassMode::Direct, found {:?}", arg.mode),
        }

        fx.caller_location = Some(OperandRef {
            val: OperandValue::Immediate(bx.get_param(llarg_idx)),
            layout: arg.layout,
        });
    }

    args
}

mod analyze;
mod block;
pub mod constant;
pub mod coverageinfo;
pub mod debuginfo;
mod intrinsic;
pub mod operand;
pub mod place;
mod rvalue;
mod statement;