// compiler/rustc_codegen_ssa/src/mir/mod.rs (from rustc 1.57.0)
use crate::traits::*;
use rustc_errors::ErrorReported;
use rustc_middle::mir;
use rustc_middle::mir::interpret::ErrorHandled;
use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, TyAndLayout};
use rustc_middle::ty::{self, Instance, Ty, TypeFoldable};
use rustc_target::abi::call::{FnAbi, PassMode};

use std::iter;

use rustc_index::bit_set::BitSet;
use rustc_index::vec::IndexVec;

use self::debuginfo::{FunctionDebugContext, PerLocalVarDebugInfo};
use self::place::PlaceRef;
use rustc_middle::mir::traversal;

use self::operand::{OperandRef, OperandValue};

/// Master context for codegenning from MIR.
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    instance: Instance<'tcx>,

    mir: &'tcx mir::Body<'tcx>,

    debug_context: Option<FunctionDebugContext<Bx::DIScope, Bx::DILocation>>,

    llfn: Bx::Function,

    cx: &'a Bx::CodegenCx,

    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,

    /// When unwinding is initiated, we have to store this personality
    /// value somewhere so that we can load it and reuse it in the
    /// resume instruction. The personality is (as far as we know) a
    /// value used by C++-style unwinding, which must filter by type;
    /// we don't otherwise care about its contents. This field holds an
    /// alloca into which the personality is stored and then later
    /// loaded when generating the DIVERGE_BLOCK.
    personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,

    /// A backend `BasicBlock` for each MIR `BasicBlock`, created lazily
    /// as-needed (e.g. RPO reaching it or another block branching to it).
    // FIXME(eddyb) rename `llbbs` and other `ll`-prefixed things to use a
    // more backend-agnostic prefix such as `cg` (i.e. this would be `cgbbs`).
    cached_llbbs: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

    /// The funclet status of each basic block.
    cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,

    /// When targeting MSVC, this stores the cleanup info for each funclet BB.
    /// This is initialized at the same time as the `landing_pads` entry for the
    /// funclets' head block, i.e. when needed by an unwind / `cleanup_ret` edge.
    funclets: IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,

    /// This stores the cached landing/cleanup pad block for a given BB.
    // FIXME(eddyb) rename this to `eh_pads`.
    landing_pads: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

    /// Cached unreachable block.
    unreachable_block: Option<Bx::BasicBlock>,

    /// The location where each MIR arg/var/tmp/ret is stored. This is
    /// usually a `PlaceRef` representing an alloca, but not always:
    /// sometimes we can skip the alloca and just store the value
    /// directly using an `OperandRef`, which makes for tighter LLVM
    /// IR. The conditions for using an `OperandRef` are as follows:
    ///
    /// - the type of the local must be judged "immediate" by `is_llvm_immediate`
    /// - the operand must never be referenced indirectly
    ///     - we should not take its address using the `&` operator
    ///     - nor should it appear in a place path like `tmp.a`
    /// - the operand must be defined by an rvalue that can generate immediate
    ///   values
    ///
    /// Avoiding allocs can also be important for certain intrinsics,
    /// notably `expect`.
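    ///
    /// A hedged sketch (not taken from the compiler sources): in a body
    /// like the one below, `sum` has an immediate type and is never
    /// borrowed, so it can live as an `OperandRef` with no alloca at all:
    ///
    /// ```ignore (illustrative)
    /// fn double_sum(a: u32, b: u32) -> u32 {
    ///     let sum = a + b; // candidate for `LocalRef::Operand`
    ///     sum * 2          // `sum` is only read directly, never via `&`
    /// }
    /// ```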
    locals: IndexVec<mir::Local, LocalRef<'tcx, Bx::Value>>,

    /// All `VarDebugInfo` from the MIR body, partitioned by `Local`.
    /// This is `None` if no variable debuginfo/names are needed.
    per_local_var_debug_info:
        Option<IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, Bx::DIVariable>>>>,

    /// Caller location propagated if this function has `#[track_caller]`.
    caller_location: Option<OperandRef<'tcx, Bx::Value>>,
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
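    /// Monomorphizes `value` using this function's `Instance`, normalizing
    /// away projections and erasing regions in the process.
    ///
    /// A minimal usage sketch (mirroring the calls made later in this file;
    /// `decl` stands in for some `LocalDecl`):
    ///
    /// ```ignore (illustrative)
    /// let ty = fx.monomorphize(decl.ty);
    /// let layout = bx.layout_of(ty);
    /// ```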
    pub fn monomorphize<T>(&self, value: T) -> T
    where
        T: Copy + TypeFoldable<'tcx>,
    {
        debug!("monomorphize: self.instance={:?}", self.instance);
        self.instance.subst_mir_and_normalize_erasing_regions(
            self.cx.tcx(),
            ty::ParamEnv::reveal_all(),
            value,
        )
    }
}

enum LocalRef<'tcx, V> {
    Place(PlaceRef<'tcx, V>),
    /// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place).
    /// `*p` is the fat pointer that references the actual unsized place.
    /// Every time it is initialized, we have to reallocate the place
    /// and update the fat pointer. That's the reason why it is indirect.
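    ///
    /// A hedged example (illustrative, not from this file): a local of
    /// unsized type such as `[u8]` or `dyn FnOnce()`, e.g. under
    /// `#![feature(unsized_fn_params)]`, must be stored through this
    /// double indirection because its size is only known at runtime.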
    UnsizedPlace(PlaceRef<'tcx, V>),
    Operand(Option<OperandRef<'tcx, V>>),
}

impl<'a, 'tcx, V: CodegenObject> LocalRef<'tcx, V> {
    fn new_operand<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> LocalRef<'tcx, V> {
        if layout.is_zst() {
            // Zero-size temporaries aren't always initialized, which
            // doesn't matter because they don't contain data, but
            // we need something in the operand.
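            // (For example, a temporary of type `()` or `[u8; 0]` is
            // zero-sized and takes this branch.)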
            LocalRef::Operand(Some(OperandRef::new_zst(bx, layout)))
        } else {
            LocalRef::Operand(None)
        }
    }
}

///////////////////////////////////////////////////////////////////////////

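/// Codegens the MIR body of `instance` into the backend function returned by
/// `cx.get_fn(instance)`. In rough outline (a summary of the steps below,
/// not a normative spec): pre-evaluate all required consts, decide which
/// locals need allocas, lower the arguments, attach debuginfo, then codegen
/// each basic block in reverse postorder.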
#[instrument(level = "debug", skip(cx))]
pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    instance: Instance<'tcx>,
) {
    assert!(!instance.substs.needs_infer());

    let llfn = cx.get_fn(instance);

    let mir = cx.tcx().instance_mir(instance.def);

    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
    debug!("fn_abi: {:?}", fn_abi);

    let debug_context = cx.create_function_debug_context(instance, &fn_abi, llfn, &mir);

    let start_llbb = Bx::append_block(cx, llfn, "start");
    let mut bx = Bx::build(cx, start_llbb);

    if mir.basic_blocks().iter().any(|bb| bb.is_cleanup) {
        bx.set_personality_fn(cx.eh_personality());
    }

    let cleanup_kinds = analyze::cleanup_kinds(&mir);
    let cached_llbbs: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>> = mir
        .basic_blocks()
        .indices()
        .map(|bb| if bb == mir::START_BLOCK { Some(start_llbb) } else { None })
        .collect();

    let mut fx = FunctionCx {
        instance,
        mir,
        llfn,
        fn_abi,
        cx,
        personality_slot: None,
        cached_llbbs,
        unreachable_block: None,
        cleanup_kinds,
        landing_pads: IndexVec::from_elem(None, mir.basic_blocks()),
        funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks().len()),
        locals: IndexVec::new(),
        debug_context,
        per_local_var_debug_info: None,
        caller_location: None,
    };

    fx.per_local_var_debug_info = fx.compute_per_local_var_debug_info(&mut bx);

    // Evaluate all required consts; codegen later assumes that CTFE will never fail.
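    // For example (illustrative): if the body mentions an associated const
    // whose evaluation fails, such as one computing `1 / 0`, CTFE has
    // already reported the error, and we bail out below rather than codegen
    // blocks that would use the value.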
    let mut all_consts_ok = true;
    for const_ in &mir.required_consts {
        if let Err(err) = fx.eval_mir_constant(const_) {
            all_consts_ok = false;
            match err {
                // errored or at least linted
                ErrorHandled::Reported(ErrorReported) | ErrorHandled::Linted => {}
                ErrorHandled::TooGeneric => {
                    span_bug!(const_.span, "codegen encountered polymorphic constant: {:?}", err)
                }
            }
        }
    }
    if !all_consts_ok {
        // We leave the IR in some half-built state here, and rely on this code not even being
        // submitted to LLVM once an error was raised.
        return;
    }

    let memory_locals = analyze::non_ssa_locals(&fx);

    // Allocate variable and temp allocas
    fx.locals = {
        let args = arg_local_refs(&mut bx, &mut fx, &memory_locals);

        let mut allocate_local = |local| {
            let decl = &mir.local_decls[local];
            let layout = bx.layout_of(fx.monomorphize(decl.ty));
            assert!(!layout.ty.has_erasable_regions(cx.tcx()));

            if local == mir::RETURN_PLACE && fx.fn_abi.ret.is_indirect() {
                debug!("alloc: {:?} (return place) -> place", local);
                let llretptr = bx.get_param(0);
                return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
            }

            if memory_locals.contains(local) {
                debug!("alloc: {:?} -> place", local);
                if layout.is_unsized() {
                    LocalRef::UnsizedPlace(PlaceRef::alloca_unsized_indirect(&mut bx, layout))
                } else {
                    LocalRef::Place(PlaceRef::alloca(&mut bx, layout))
                }
            } else {
                debug!("alloc: {:?} -> operand", local);
                LocalRef::new_operand(&mut bx, layout)
            }
        };

        let retptr = allocate_local(mir::RETURN_PLACE);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };

    // Apply debuginfo to the newly allocated locals.
    fx.debug_introduce_locals(&mut bx);

    // Codegen the body of each block using reverse postorder
    // FIXME(eddyb) reuse RPO iterator between `analysis` and this.
    for (bb, _) in traversal::reverse_postorder(&mir) {
        fx.codegen_block(bb);
    }
}

/// Produces, for each argument, a `Value` pointing at the
/// argument's value. As arguments are places, these are always
/// indirect.
fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    fx: &mut FunctionCx<'a, 'tcx, Bx>,
    memory_locals: &BitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
    let mir = fx.mir;
    let mut idx = 0;
    let mut llarg_idx = fx.fn_abi.ret.is_indirect() as usize;

    let mut num_untupled = None;

    let args = mir
        .args_iter()
        .enumerate()
        .map(|(arg_index, local)| {
            let arg_decl = &mir.local_decls[local];

            if Some(local) == mir.spread_arg {
                // This argument (e.g., the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual LLVM function arguments.
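                // For example (illustrative, not from this file): for
                // `extern "rust-call" fn call_once(self, args: (A, B))`,
                // the one MIR local `args` maps onto two ABI-level
                // arguments of types `A` and `B`.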

                let arg_ty = fx.monomorphize(arg_decl.ty);
                let tupled_arg_tys = match arg_ty.kind() {
                    ty::Tuple(tys) => tys,
                    _ => bug!("spread argument isn't a tuple?!"),
                };

                let place = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                for i in 0..tupled_arg_tys.len() {
                    let arg = &fx.fn_abi.args[idx];
                    idx += 1;
                    if arg.pad.is_some() {
                        llarg_idx += 1;
                    }
                    let pr_field = place.project_field(bx, i);
                    bx.store_fn_arg(arg, &mut llarg_idx, pr_field);
                }
                assert_eq!(
                    None,
                    num_untupled.replace(tupled_arg_tys.len()),
                    "Replaced existing num_untupled"
                );

                return LocalRef::Place(place);
            }

            if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
                let arg_ty = fx.monomorphize(arg_decl.ty);

                let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                bx.va_start(va_list.llval);

                return LocalRef::Place(va_list);
            }

            let arg = &fx.fn_abi.args[idx];
            idx += 1;
            if arg.pad.is_some() {
                llarg_idx += 1;
            }

            if !memory_locals.contains(local) {
                // We don't have to cast or keep the argument in the alloca.
                // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
                // of putting everything in allocas just so we can use llvm.dbg.declare.
                let local = |op| LocalRef::Operand(Some(op));
                match arg.mode {
                    PassMode::Ignore => {
                        return local(OperandRef::new_zst(bx, arg.layout));
                    }
                    PassMode::Direct(_) => {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        return local(OperandRef::from_immediate_or_packed_pair(
                            bx, llarg, arg.layout,
                        ));
                    }
                    PassMode::Pair(..) => {
                        let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1));
                        llarg_idx += 2;

                        return local(OperandRef {
                            val: OperandValue::Pair(a, b),
                            layout: arg.layout,
                        });
                    }
                    _ => {}
                }
            }

            if arg.is_sized_indirect() {
                // Don't copy an indirect argument to an alloca, the caller
                // already put it in a temporary alloca and gave it up.
                // FIXME: lifetimes
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout))
            } else if arg.is_unsized_indirect() {
                // As the storage for the indirect argument lives during
                // the whole function call, we just copy the fat pointer.
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let llextra = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let indirect_operand = OperandValue::Pair(llarg, llextra);

                let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout);
                indirect_operand.store(bx, tmp);
                LocalRef::UnsizedPlace(tmp)
            } else {
                let tmp = PlaceRef::alloca(bx, arg.layout);
                bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                LocalRef::Place(tmp)
            }
        })
        .collect::<Vec<_>>();
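    // A `#[track_caller]` instance receives one extra trailing ABI argument
    // holding the caller's `Location`. For example (illustrative):
    // `#[track_caller] fn f(x: u32)` has one argument in its MIR but two in
    // its ABI, the last being the `&'static Location<'static>` pointer.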
    if fx.instance.def.requires_caller_location(bx.tcx()) {
        let mir_args = if let Some(num_untupled) = num_untupled {
            // Subtract off the tupled argument that gets 'expanded'
            args.len() - 1 + num_untupled
        } else {
            args.len()
        };
        assert_eq!(
            fx.fn_abi.args.len(),
            mir_args + 1,
            "#[track_caller] instance {:?} must have 1 more argument in its ABI than in its MIR",
            fx.instance
        );

        let arg = fx.fn_abi.args.last().unwrap();
        match arg.mode {
            PassMode::Direct(_) => (),
            _ => bug!("caller location must be PassMode::Direct, found {:?}", arg.mode),
        }

        fx.caller_location = Some(OperandRef {
            val: OperandValue::Immediate(bx.get_param(llarg_idx)),
            layout: arg.layout,
        });
    }

    args
}

mod analyze;
mod block;
pub mod constant;
pub mod coverageinfo;
pub mod debuginfo;
mod intrinsic;
pub mod operand;
pub mod place;
mod rvalue;
mod statement;