use crate::traits::*;
use rustc_middle::mir;
use rustc_middle::mir::interpret::ErrorHandled;
use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, TyAndLayout};
use rustc_middle::ty::{self, Instance, Ty, TypeFoldable, TypeVisitable};
use rustc_target::abi::call::{FnAbi, PassMode};

use std::iter;

use rustc_index::bit_set::BitSet;
use rustc_index::vec::IndexVec;

use self::debuginfo::{FunctionDebugContext, PerLocalVarDebugInfo};
use self::place::PlaceRef;
use rustc_middle::mir::traversal;

use self::operand::{OperandRef, OperandValue};

/// Master context for codegenning from MIR.
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    instance: Instance<'tcx>,

    mir: &'tcx mir::Body<'tcx>,

    debug_context: Option<FunctionDebugContext<Bx::DIScope, Bx::DILocation>>,

    llfn: Bx::Function,

    cx: &'a Bx::CodegenCx,

    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,

    /// When unwinding is initiated, we have to store this personality
    /// value somewhere so that we can load it and re-use it in the
    /// resume instruction. The personality is (afaik) some kind of
    /// value used for C++ unwinding, which must filter by type: we
    /// don't really care about it very much. Anyway, this value
    /// contains an alloca into which the personality is stored and
    /// then later loaded when generating the DIVERGE_BLOCK.
    personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,
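    // Editorial sketch, not from the upstream file: on Itanium-ABI targets the
    // personality value shows up in LLVM IR roughly as
    //
    //     %pad = landingpad { i8*, i32 } cleanup
    //     ; ... spill %pad into the `personality_slot` alloca ...
    //     ; ... reload it later and `resume { i8*, i32 } %pad` ...
    //
    // The slot lets several cleanup paths funnel into a single resume block.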

    /// A backend `BasicBlock` for each MIR `BasicBlock`, created lazily
    /// as-needed (e.g. RPO reaching it or another block branching to it).
    // FIXME(eddyb) rename `llbbs` and other `ll`-prefixed things to use a
    // more backend-agnostic prefix such as `cg` (i.e. this would be `cgbbs`).
    cached_llbbs: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

    /// The funclet status of each basic block
    cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,

    /// When targeting MSVC, this stores the cleanup info for each funclet BB.
    /// This is initialized at the same time as the `landing_pads` entry for the
    /// funclets' head block, i.e. when needed by an unwind / `cleanup_ret` edge.
    funclets: IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,

    /// This stores the cached landing/cleanup pad block for a given BB.
    // FIXME(eddyb) rename this to `eh_pads`.
    landing_pads: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

    /// Cached unreachable block
    unreachable_block: Option<Bx::BasicBlock>,

    /// Cached double unwind guarding block
    double_unwind_guard: Option<Bx::BasicBlock>,

    /// The location where each MIR arg/var/tmp/ret is stored. This is
    /// usually a `PlaceRef` representing an alloca, but not always:
    /// sometimes we can skip the alloca and just store the value
    /// directly using an `OperandRef`, which makes for tighter LLVM
    /// IR. The conditions for using an `OperandRef` are as follows:
    ///
    /// - the type of the local must be judged "immediate" by `is_llvm_immediate`
    /// - the operand must never be referenced indirectly
    ///     - we should not take its address using the `&` operator
    ///     - nor should it appear in a place path like `tmp.a`
    /// - the operand must be defined by an rvalue that can generate immediate
    ///   values
    ///
    /// Avoiding allocs can also be important for certain intrinsics,
    /// notably `expect`.
    locals: IndexVec<mir::Local, LocalRef<'tcx, Bx::Value>>,
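    // Editorial example, not from the upstream file: given surface Rust like
    //
    //     fn f(x: u32) -> u32 {
    //         let a = x + 1; // meets the rules above: stays an `OperandRef`,
    //                        // so no alloca is emitted for `a`.
    //         let b = x + 2;
    //         let r = &b;    // taking `b`'s address makes it non-SSA, so `b`
    //                        // gets a real alloca (`LocalRef::Place`).
    //         a + *r
    //     }
    //
    // the operand-vs-place decision is computed by `analyze::non_ssa_locals`.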

    /// All `VarDebugInfo` from the MIR body, partitioned by `Local`.
    /// This is `None` if no variable debuginfo/names are needed.
    per_local_var_debug_info:
        Option<IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, Bx::DIVariable>>>>,

    /// Caller location propagated if this function has `#[track_caller]`.
    caller_location: Option<OperandRef<'tcx, Bx::Value>>,
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    pub fn monomorphize<T>(&self, value: T) -> T
    where
        T: Copy + TypeFoldable<'tcx>,
    {
        debug!("monomorphize: self.instance={:?}", self.instance);
        self.instance.subst_mir_and_normalize_erasing_regions(
            self.cx.tcx(),
            ty::ParamEnv::reveal_all(),
            value,
        )
    }
}
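
// Editorial sketch, not from the upstream file: conceptually, while codegenning
// the instance `id::<u32>` of `fn id<T>(t: T) -> T`, a call such as
//
//     let concrete_ty = fx.monomorphize(decl.ty); // `T` becomes `u32`
//
// substitutes this instance's generic arguments and erases regions, so every
// downstream layout query sees a fully concrete type.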
enum LocalRef<'tcx, V> {
    Place(PlaceRef<'tcx, V>),
    /// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place).
    /// `*p` is the fat pointer that references the actual unsized place.
    /// Every time it is initialized, we have to reallocate the place
    /// and update the fat pointer. That's the reason why it is indirect.
    UnsizedPlace(PlaceRef<'tcx, V>),
    Operand(Option<OperandRef<'tcx, V>>),
}
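
// Editorial example, not from the upstream file: `UnsizedPlace` only arises for
// locals of unsized type, e.g. with the nightly `unsized_locals` feature:
//
//     #![feature(unsized_locals)]
//     fn demo(b: Box<dyn Fn() -> u32>) -> u32 {
//         let f: dyn Fn() -> u32 = *b; // `f` is an unsized local
//         f()
//     }
//
// `f` has no statically known size, so the local is a thin pointer to a fat
// pointer that is re-pointed on every initialization.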

impl<'a, 'tcx, V: CodegenObject> LocalRef<'tcx, V> {
    fn new_operand<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> LocalRef<'tcx, V> {
        if layout.is_zst() {
            // Zero-size temporaries aren't always initialized, which
            // doesn't matter because they don't contain data, but
            // we need something in the operand.
            LocalRef::Operand(Some(OperandRef::new_zst(bx, layout)))
        } else {
            LocalRef::Operand(None)
        }
    }
}
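
// Editorial example, not from the upstream file: zero-sized types such as `()`
// or `struct Marker;` occupy no storage, e.g.
//
//     struct Marker;
//     let m = Marker; // nothing to materialize, yet MIR can still "read" `m`
//
// so the local gets a dummy `OperandRef` instead of remaining uninitialized.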

///////////////////////////////////////////////////////////////////////////

#[instrument(level = "debug", skip(cx))]
pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    instance: Instance<'tcx>,
) {
    assert!(!instance.substs.needs_infer());

    let llfn = cx.get_fn(instance);

    let mir = cx.tcx().instance_mir(instance.def);

    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
    debug!("fn_abi: {:?}", fn_abi);

    let debug_context = cx.create_function_debug_context(instance, &fn_abi, llfn, &mir);

    let start_llbb = Bx::append_block(cx, llfn, "start");
    let mut start_bx = Bx::build(cx, start_llbb);

    if mir.basic_blocks.iter().any(|bb| bb.is_cleanup) {
        start_bx.set_personality_fn(cx.eh_personality());
    }

    let cleanup_kinds = analyze::cleanup_kinds(&mir);
    let cached_llbbs: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>> = mir
        .basic_blocks
        .indices()
        .map(|bb| if bb == mir::START_BLOCK { Some(start_llbb) } else { None })
        .collect();

    let mut fx = FunctionCx {
        instance,
        mir,
        llfn,
        fn_abi,
        cx,
        personality_slot: None,
        cached_llbbs,
        unreachable_block: None,
        double_unwind_guard: None,
        cleanup_kinds,
        landing_pads: IndexVec::from_elem(None, &mir.basic_blocks),
        funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks.len()),
        locals: IndexVec::new(),
        debug_context,
        per_local_var_debug_info: None,
        caller_location: None,
    };

    fx.per_local_var_debug_info = fx.compute_per_local_var_debug_info(&mut start_bx);

    // Evaluate all required consts; codegen later assumes that CTFE will never fail.
    let mut all_consts_ok = true;
    for const_ in &mir.required_consts {
        if let Err(err) = fx.eval_mir_constant(const_) {
            all_consts_ok = false;
            match err {
                // errored or at least linted
                ErrorHandled::Reported(_) | ErrorHandled::Linted => {}
                ErrorHandled::TooGeneric => {
                    span_bug!(const_.span, "codegen encountered polymorphic constant: {:?}", err)
                }
            }
        }
    }
    if !all_consts_ok {
        // We leave the IR in some half-built state here, and rely on this code not even being
        // submitted to LLVM once an error was raised.
        return;
    }
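    // Editorial example, not from the upstream file: `required_consts` lists
    // every constant the body mentions, e.g.
    //
    //     const C: i32 = 1 / 0;   // CTFE of `C` fails...
    //     fn f() -> i32 { C }     // ...so codegen of `f` bails out above.
    //
    // `TooGeneric` is a bug here because `instance` is fully monomorphic.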

    let memory_locals = analyze::non_ssa_locals(&fx);

    // Allocate variable and temp allocas
    fx.locals = {
        let args = arg_local_refs(&mut start_bx, &mut fx, &memory_locals);

        let mut allocate_local = |local| {
            let decl = &mir.local_decls[local];
            let layout = start_bx.layout_of(fx.monomorphize(decl.ty));
            assert!(!layout.ty.has_erasable_regions());

            if local == mir::RETURN_PLACE && fx.fn_abi.ret.is_indirect() {
                debug!("alloc: {:?} (return place) -> place", local);
                let llretptr = start_bx.get_param(0);
                return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
            }

            if memory_locals.contains(local) {
                debug!("alloc: {:?} -> place", local);
                if layout.is_unsized() {
                    LocalRef::UnsizedPlace(PlaceRef::alloca_unsized_indirect(&mut start_bx, layout))
                } else {
                    LocalRef::Place(PlaceRef::alloca(&mut start_bx, layout))
                }
            } else {
                debug!("alloc: {:?} -> operand", local);
                LocalRef::new_operand(&mut start_bx, layout)
            }
        };

        let retptr = allocate_local(mir::RETURN_PLACE);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };

    // Apply debuginfo to the newly allocated locals.
    fx.debug_introduce_locals(&mut start_bx);

    // Codegen the body of each block using reverse postorder
    for (bb, _) in traversal::reverse_postorder(&mir) {
        fx.codegen_block(bb);
    }
}

/// Produces, for each argument, a `Value` pointing at the
/// argument's value. As arguments are places, these are always
/// indirect.
fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    fx: &mut FunctionCx<'a, 'tcx, Bx>,
    memory_locals: &BitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
    let mir = fx.mir;
    let mut idx = 0;
    let mut llarg_idx = fx.fn_abi.ret.is_indirect() as usize;

    let mut num_untupled = None;

    let args = mir
        .args_iter()
        .enumerate()
        .map(|(arg_index, local)| {
            let arg_decl = &mir.local_decls[local];

            if Some(local) == mir.spread_arg {
                // This argument (e.g., the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual LLVM function arguments.

                let arg_ty = fx.monomorphize(arg_decl.ty);
                let ty::Tuple(tupled_arg_tys) = arg_ty.kind() else {
                    bug!("spread argument isn't a tuple?!");
                };

                let place = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                for i in 0..tupled_arg_tys.len() {
                    let arg = &fx.fn_abi.args[idx];
                    idx += 1;
                    if let PassMode::Cast(_, true) = arg.mode {
                        llarg_idx += 1;
                    }
                    let pr_field = place.project_field(bx, i);
                    bx.store_fn_arg(arg, &mut llarg_idx, pr_field);
                }
                assert_eq!(
                    None,
                    num_untupled.replace(tupled_arg_tys.len()),
                    "replaced existing `num_untupled`"
                );

                return LocalRef::Place(place);
            }
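            // Editorial example, not from the upstream file: for a closure
            // `let f = |a: i32, b: i32| a + b;`, the `Fn*` call shims use the
            // "rust-call" ABI: their MIR sees one spread argument of type
            // `(i32, i32)`, while the backend signature passes two separate
            // `i32`s, which the loop above stores back into one tuple alloca.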

            if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
                let arg_ty = fx.monomorphize(arg_decl.ty);

                let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                bx.va_start(va_list.llval);

                return LocalRef::Place(va_list);
            }
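            // Editorial example, not from the upstream file: this trailing
            // pseudo-argument backs nightly C-variadic definitions, e.g.
            //
            //     #![feature(c_variadic)]
            //     unsafe extern "C" fn sum(n: usize, mut ap: ...) -> usize { /* ... */ }
            //
            // where `ap` is the hidden `VaList` local initialized via `va_start`.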

            let arg = &fx.fn_abi.args[idx];
            idx += 1;
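            // Editorial note (assumption): the `bool` in `PassMode::Cast`
            // signals a prepended padding argument, so the backend parameter
            // index must skip one extra slot.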
            if let PassMode::Cast(_, true) = arg.mode {
                llarg_idx += 1;
            }

            if !memory_locals.contains(local) {
                // We don't have to cast or keep the argument in the alloca.
                // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
                // of putting everything in allocas just so we can use llvm.dbg.declare.
                let local = |op| LocalRef::Operand(Some(op));
                match arg.mode {
                    PassMode::Ignore => {
                        return local(OperandRef::new_zst(bx, arg.layout));
                    }
                    PassMode::Direct(_) => {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        return local(OperandRef::from_immediate_or_packed_pair(
                            bx, llarg, arg.layout,
                        ));
                    }
                    PassMode::Pair(..) => {
                        let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1));
                        llarg_idx += 2;

                        return local(OperandRef {
                            val: OperandValue::Pair(a, b),
                            layout: arg.layout,
                        });
                    }
                    _ => {}
                }
            }

            if arg.is_sized_indirect() {
                // Don't copy an indirect argument to an alloca, the caller
                // already put it in a temporary alloca and gave it up.
                // FIXME: lifetimes
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout))
            } else if arg.is_unsized_indirect() {
                // As the storage for the indirect argument lives during
                // the whole function call, we just copy the fat pointer.
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let llextra = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let indirect_operand = OperandValue::Pair(llarg, llextra);

                let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout);
                indirect_operand.store(bx, tmp);
                LocalRef::UnsizedPlace(tmp)
            } else {
                let tmp = PlaceRef::alloca(bx, arg.layout);
                bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                LocalRef::Place(tmp)
            }
        })
        .collect::<Vec<_>>();

    if fx.instance.def.requires_caller_location(bx.tcx()) {
        let mir_args = if let Some(num_untupled) = num_untupled {
            // Subtract off the tupled argument that gets 'expanded'
            args.len() - 1 + num_untupled
        } else {
            args.len()
        };
        assert_eq!(
            fx.fn_abi.args.len(),
            mir_args + 1,
            "#[track_caller] instance {:?} must have 1 more argument in its ABI than in its MIR",
            fx.instance
        );

        let arg = fx.fn_abi.args.last().unwrap();
        match arg.mode {
            PassMode::Direct(_) => (),
            _ => bug!("caller location must be PassMode::Direct, found {:?}", arg.mode),
        }

        fx.caller_location = Some(OperandRef {
            val: OperandValue::Immediate(bx.get_param(llarg_idx)),
            layout: arg.layout,
        });
    }
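    // Editorial example, not from the upstream file: the extra ABI argument
    // carries a `&'static Location<'static>`, observable in surface Rust as
    //
    //     #[track_caller]
    //     fn log() { println!("called from {}", std::panic::Location::caller()); }
    //
    // Each caller of `log` implicitly passes its own source location as this
    // trailing `PassMode::Direct` parameter.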

    args
}

mod analyze;
mod block;
pub mod constant;
pub mod coverageinfo;
pub mod debuginfo;
mod intrinsic;
pub mod operand;
pub mod place;
mod rvalue;
mod statement;