use rustc_middle::mir::interpret::ErrorHandled;
use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, TyAndLayout};
use rustc_middle::ty::{self, Instance, Ty, TyCtxt, TypeFoldable, TypeVisitableExt};
use rustc_target::abi::call::{FnAbi, PassMode};

use rustc_index::bit_set::BitSet;
use rustc_index::vec::IndexVec;

use self::debuginfo::{FunctionDebugContext, PerLocalVarDebugInfo};
use self::place::PlaceRef;
use rustc_middle::mir::traversal;

use self::operand::{OperandRef, OperandValue};

use std::iter;
// Used for tracking the state of generated basic blocks.
enum CachedLlbb<T> {
    /// Nothing created yet.
    None,

    /// Has been created.
    Some(T),

    /// Nothing created yet, and nothing should be.
    Skip,
}
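
// How the cache below is typically consulted (an illustrative sketch, assuming
// a lookup helper on `FunctionCx`; the helper itself is not part of this excerpt):
//
//     let llbb = match fx.cached_llbbs[bb] {
//         CachedLlbb::Some(llbb) => llbb,
//         CachedLlbb::None => { /* append a backend block now and cache it */ }
//         CachedLlbb::Skip => { /* nothing should ever be created for this block */ }
//     };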

/// Master context for codegenning from MIR.
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    instance: Instance<'tcx>,

    mir: &'tcx mir::Body<'tcx>,

    debug_context: Option<FunctionDebugContext<Bx::DIScope, Bx::DILocation>>,

    cx: &'a Bx::CodegenCx,

    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,

    /// When unwinding is initiated, we have to store this personality
    /// value somewhere so that we can load it and re-use it in the
    /// resume instruction. The personality is (afaik) some kind of
    /// value used for C++ unwinding, which must filter by type: we
    /// don't really care about it very much. Anyway, this value
    /// contains an alloca into which the personality is stored and
    /// then later loaded when generating the DIVERGE_BLOCK.
    personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,

    /// A backend `BasicBlock` for each MIR `BasicBlock`, created lazily
    /// as-needed (e.g. RPO reaching it or another block branching to it).
    // FIXME(eddyb) rename `llbbs` and other `ll`-prefixed things to use a
    // more backend-agnostic prefix such as `cg` (i.e. this would be `cgbbs`).
    cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>>,

    /// The funclet status of each basic block
    cleanup_kinds: Option<IndexVec<mir::BasicBlock, analyze::CleanupKind>>,

    /// When targeting MSVC, this stores the cleanup info for each funclet BB.
    /// This is initialized at the same time as the `landing_pads` entry for the
    /// funclets' head block, i.e. when needed by an unwind / `cleanup_ret` edge.
    funclets: IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,

    /// This stores the cached landing/cleanup pad block for a given BB.
    // FIXME(eddyb) rename this to `eh_pads`.
    landing_pads: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

    /// Cached unreachable block
    unreachable_block: Option<Bx::BasicBlock>,

    /// Cached terminate upon unwinding block
    terminate_block: Option<Bx::BasicBlock>,

    /// The location where each MIR arg/var/tmp/ret is stored. This is
    /// usually a `PlaceRef` representing an alloca, but not always:
    /// sometimes we can skip the alloca and just store the value
    /// directly using an `OperandRef`, which makes for tighter LLVM
    /// IR. The conditions for using an `OperandRef` are as follows:
    ///
    /// - the type of the local must be judged "immediate" by `is_llvm_immediate`
    /// - the operand must never be referenced indirectly
    ///     - we should not take its address using the `&` operator
    ///     - nor should it appear in a place path like `tmp.a`
    /// - the operand must be defined by an rvalue that can generate immediate
    ///   values
    ///
    /// Avoiding allocs can also be important for certain intrinsics.
    locals: IndexVec<mir::Local, LocalRef<'tcx, Bx::Value>>,
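    // Illustrative sketch (not from the original source): a scalar local that is
    // only ever read by value, e.g. `let x = a + b;` where `x` is never borrowed,
    // can stay a `LocalRef::Operand`, while `let y = 0; let r = &y;` forces `y`
    // into a `LocalRef::Place` backed by an alloca because its address is taken.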

    /// All `VarDebugInfo` from the MIR body, partitioned by `Local`.
    /// This is `None` if no variable debuginfo/names are needed.
    per_local_var_debug_info:
        Option<IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, Bx::DIVariable>>>>,

    /// Caller location propagated if this function has `#[track_caller]`.
    caller_location: Option<OperandRef<'tcx, Bx::Value>>,
}
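
// Illustrative note (an assumption about `#[track_caller]`, not text from the
// original source): for a function such as `#[track_caller] fn fail() -> !`,
// the ABI carries one extra trailing `&'static core::panic::Location<'static>`
// parameter; `arg_local_refs` below reads that parameter and stores it into
// `caller_location`.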

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    pub fn monomorphize<T>(&self, value: T) -> T
    where
        T: Copy + TypeFoldable<TyCtxt<'tcx>>,
    {
        debug!("monomorphize: self.instance={:?}", self.instance);
        self.instance.subst_mir_and_normalize_erasing_regions(
            self.cx.tcx(),
            ty::ParamEnv::reveal_all(),
            ty::EarlyBinder(value),
        )
    }
}
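
// Usage sketch: values read out of the MIR body (types, constants) still mention
// the function's generic parameters, so they are passed through `monomorphize`
// before asking the backend for layouts, e.g.
// `start_bx.layout_of(fx.monomorphize(decl.ty))` as done in `codegen_mir` below.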

enum LocalRef<'tcx, V> {
    Place(PlaceRef<'tcx, V>),
    /// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place).
    /// `*p` is the fat pointer that references the actual unsized place.
    /// Every time it is initialized, we have to reallocate the place
    /// and update the fat pointer. That's the reason why it is indirect.
    UnsizedPlace(PlaceRef<'tcx, V>),
    /// The backend [`OperandValue`] has already been generated.
    Operand(OperandRef<'tcx, V>),
    /// Will be a `Self::Operand` once we get to its definition.
    PendingOperand,
}
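
// Illustrative note (not from the original source): `UnsizedPlace` only arises
// for locals of unsized type such as `[u8]` or `dyn Trait` (possible e.g. with
// the unstable `unsized_fn_params` / `unsized_locals` features). The stored thin
// pointer is re-pointed at a freshly sized allocation each time the local is
// initialized, which is why the place is kept indirect.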

impl<'a, 'tcx, V: CodegenObject> LocalRef<'tcx, V> {
    fn new_operand<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> LocalRef<'tcx, V> {
        if layout.is_zst() {
            // Zero-size temporaries aren't always initialized, which
            // doesn't matter because they don't contain data, but
            // we need something in the operand.
            LocalRef::Operand(OperandRef::new_zst(bx, layout))
        } else {
            LocalRef::PendingOperand
        }
    }
}

///////////////////////////////////////////////////////////////////////////

#[instrument(level = "debug", skip(cx))]
pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    instance: Instance<'tcx>,
) {
    assert!(!instance.substs.needs_infer());

    let llfn = cx.get_fn(instance);

    let mir = cx.tcx().instance_mir(instance.def);

    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
    debug!("fn_abi: {:?}", fn_abi);

    let debug_context = cx.create_function_debug_context(instance, &fn_abi, llfn, &mir);

    let start_llbb = Bx::append_block(cx, llfn, "start");
    let mut start_bx = Bx::build(cx, start_llbb);

    if mir.basic_blocks.iter().any(|bb| {
        bb.is_cleanup || matches!(bb.terminator().unwind(), Some(mir::UnwindAction::Terminate))
    }) {
        start_bx.set_personality_fn(cx.eh_personality());
    }

    let cleanup_kinds =
        base::wants_msvc_seh(cx.tcx().sess).then(|| analyze::cleanup_kinds(&mir));

    let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> =
        mir.basic_blocks
            .indices()
            .map(|bb| {
                if bb == mir::START_BLOCK { CachedLlbb::Some(start_llbb) } else { CachedLlbb::None }
            })
            .collect();

    let mut fx = FunctionCx {
        instance,
        mir,
        fn_abi,
        cx,
        personality_slot: None,
        cached_llbbs,
        unreachable_block: None,
        terminate_block: None,
        cleanup_kinds,
        landing_pads: IndexVec::from_elem(None, &mir.basic_blocks),
        funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks.len()),
        locals: IndexVec::new(),
        debug_context,
        per_local_var_debug_info: None,
        caller_location: None,
    };

    fx.per_local_var_debug_info = fx.compute_per_local_var_debug_info(&mut start_bx);

    // Evaluate all required consts; codegen later assumes that CTFE will never fail.
    let mut all_consts_ok = true;
    for const_ in &mir.required_consts {
        if let Err(err) = fx.eval_mir_constant(const_) {
            all_consts_ok = false;
            match err {
                // errored or at least linted
                ErrorHandled::Reported(_) => {}
                ErrorHandled::TooGeneric => {
                    span_bug!(const_.span, "codegen encountered polymorphic constant: {:?}", err)
                }
            }
        }
    }
    if !all_consts_ok {
        // We leave the IR in some half-built state here, and rely on this code not even being
        // submitted to LLVM once an error was raised.
        return;
    }

    let memory_locals = analyze::non_ssa_locals(&fx);

    // Allocate variable and temp allocas
    fx.locals = {
        let args = arg_local_refs(&mut start_bx, &mut fx, &memory_locals);

        let mut allocate_local = |local| {
            let decl = &mir.local_decls[local];
            let layout = start_bx.layout_of(fx.monomorphize(decl.ty));
            assert!(!layout.ty.has_erasable_regions());

            if local == mir::RETURN_PLACE && fx.fn_abi.ret.is_indirect() {
                debug!("alloc: {:?} (return place) -> place", local);
                let llretptr = start_bx.get_param(0);
                return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
            }

            if memory_locals.contains(local) {
                debug!("alloc: {:?} -> place", local);
                if layout.is_unsized() {
                    LocalRef::UnsizedPlace(PlaceRef::alloca_unsized_indirect(&mut start_bx, layout))
                } else {
                    LocalRef::Place(PlaceRef::alloca(&mut start_bx, layout))
                }
            } else {
                debug!("alloc: {:?} -> operand", local);
                LocalRef::new_operand(&mut start_bx, layout)
            }
        };

        let retptr = allocate_local(mir::RETURN_PLACE);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };

    // Apply debuginfo to the newly allocated locals.
    fx.debug_introduce_locals(&mut start_bx);

    // The builders will be created separately for each basic block at `codegen_block`.
    // So drop the builder of `start_llbb` to avoid having two at the same time.
    drop(start_bx);

    // Codegen the body of each block using reverse postorder
    for (bb, _) in traversal::reverse_postorder(&mir) {
        fx.codegen_block(bb);
    }
}

/// Produces, for each argument, a `Value` pointing at the
/// argument's value. As arguments are places, these are always
/// indirect.
fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    fx: &mut FunctionCx<'a, 'tcx, Bx>,
    memory_locals: &BitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
    let mir = fx.mir;

    let mut idx = 0;
    let mut llarg_idx = fx.fn_abi.ret.is_indirect() as usize;

    let mut num_untupled = None;

    let args = mir
        .args_iter()
        .enumerate()
        .map(|(arg_index, local)| {
            let arg_decl = &mir.local_decls[local];

            if Some(local) == mir.spread_arg {
                // This argument (e.g., the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual LLVM function arguments.
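                //
                // Illustrative example (a sketch, not from the original source):
                // for an `extern "rust-call" fn call(self, args: (u32, bool))`,
                // the `(u32, bool)` tuple is the spread argument; the backend sees
                // two scalar parameters here, and the loop below stores them back
                // into a single tuple-typed alloca for the MIR local.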
                let arg_ty = fx.monomorphize(arg_decl.ty);
                let ty::Tuple(tupled_arg_tys) = arg_ty.kind() else {
                    bug!("spread argument isn't a tuple?!");
                };

                let place = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                for i in 0..tupled_arg_tys.len() {
                    let arg = &fx.fn_abi.args[idx];
                    idx += 1;
                    if let PassMode::Cast(_, true) = arg.mode {
                        llarg_idx += 1;
                    }
                    let pr_field = place.project_field(bx, i);
                    bx.store_fn_arg(arg, &mut llarg_idx, pr_field);
                }
                assert_eq!(
                    None,
                    num_untupled.replace(tupled_arg_tys.len()),
                    "Replaced existing num_tupled"
                );

                return LocalRef::Place(place);
            }

            if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
                let arg_ty = fx.monomorphize(arg_decl.ty);

                let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                bx.va_start(va_list.llval);

                return LocalRef::Place(va_list);
            }

            let arg = &fx.fn_abi.args[idx];
            idx += 1;

            if let PassMode::Cast(_, true) = arg.mode {
                llarg_idx += 1;
            }

            if !memory_locals.contains(local) {
                // We don't have to cast or keep the argument in the alloca.
                // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
                // of putting everything in allocas just so we can use llvm.dbg.declare.
                let local = |op| LocalRef::Operand(op);
                match arg.mode {
                    PassMode::Ignore => {
                        return local(OperandRef::new_zst(bx, arg.layout));
                    }
                    PassMode::Direct(_) => {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        return local(OperandRef::from_immediate_or_packed_pair(
                            bx, llarg, arg.layout,
                        ));
                    }
                    PassMode::Pair(..) => {
                        let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1));
                        llarg_idx += 2;

                        return local(OperandRef {
                            val: OperandValue::Pair(a, b),
                            layout: arg.layout,
                        });
                    }
                    _ => {}
                }
            }

            if arg.is_sized_indirect() {
                // Don't copy an indirect argument to an alloca, the caller
                // already put it in a temporary alloca and gave it up.
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout))
            } else if arg.is_unsized_indirect() {
                // As the storage for the indirect argument lives during
                // the whole function call, we just copy the fat pointer.
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let llextra = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let indirect_operand = OperandValue::Pair(llarg, llextra);

                let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout);
                indirect_operand.store(bx, tmp);
                LocalRef::UnsizedPlace(tmp)
            } else {
                let tmp = PlaceRef::alloca(bx, arg.layout);
                bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                LocalRef::Place(tmp)
            }
        })
        .collect::<Vec<_>>();

    if fx.instance.def.requires_caller_location(bx.tcx()) {
        let mir_args = if let Some(num_untupled) = num_untupled {
            // Subtract off the tupled argument that gets 'expanded'
            args.len() - 1 + num_untupled
        } else {
            args.len()
        };
        assert_eq!(
            fx.fn_abi.args.len(),
            mir_args + 1,
            "#[track_caller] instance {:?} must have 1 more argument in their ABI than in their MIR",
            fx.instance
        );

        let arg = fx.fn_abi.args.last().unwrap();
        match arg.mode {
            PassMode::Direct(_) => (),
            _ => bug!("caller location must be PassMode::Direct, found {:?}", arg.mode),
        }

        fx.caller_location = Some(OperandRef {
            val: OperandValue::Immediate(bx.get_param(llarg_idx)),
            layout: arg.layout,
        });
    }

    args
}

pub mod coverageinfo;