use crate::base;
use crate::traits::*;
use rustc_errors::ErrorReported;
use rustc_middle::mir;
use rustc_middle::mir::interpret::ErrorHandled;
use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt, TyAndLayout};
use rustc_middle::ty::{self, Instance, Ty, TypeFoldable};
use rustc_target::abi::call::{FnAbi, PassMode};
use rustc_target::abi::HasDataLayout;

use std::iter;

use rustc_index::bit_set::BitSet;
use rustc_index::vec::IndexVec;

use self::analyze::CleanupKind;
use self::debuginfo::{FunctionDebugContext, PerLocalVarDebugInfo};
use self::place::PlaceRef;
use rustc_middle::mir::traversal;

use self::operand::{OperandRef, OperandValue};

/// Master context for codegenning from MIR.
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    instance: Instance<'tcx>,

    mir: &'tcx mir::Body<'tcx>,

    debug_context: Option<FunctionDebugContext<Bx::DIScope, Bx::DILocation>>,

    llfn: Bx::Function,

    cx: &'a Bx::CodegenCx,

    fn_abi: FnAbi<'tcx, Ty<'tcx>>,

    /// When unwinding is initiated, we have to store this personality
    /// value somewhere so that we can load it and re-use it in the
    /// resume instruction. The personality is (afaik) some kind of
    /// value used for C++ unwinding, which must filter by type: we
    /// don't really care about it very much. Anyway, this value
    /// contains an alloca into which the personality is stored and
    /// then later loaded when generating the DIVERGE_BLOCK.
    personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,

    /// A `Block` for each MIR `BasicBlock`.
    blocks: IndexVec<mir::BasicBlock, Bx::BasicBlock>,

    /// The funclet status of each basic block.
    cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,

    /// When targeting MSVC, this stores the cleanup info for each funclet BB.
    /// This is initialized as we compute the funclets' head block in RPO.
    funclets: IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,

    /// This stores the landing-pad block for a given BB, computed lazily on GNU
    /// and eagerly on MSVC.
    landing_pads: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

    /// Cached unreachable block.
    unreachable_block: Option<Bx::BasicBlock>,

    /// The location where each MIR arg/var/tmp/ret is stored. This is
    /// usually a `PlaceRef` representing an alloca, but not always:
    /// sometimes we can skip the alloca and just store the value
    /// directly using an `OperandRef`, which makes for tighter LLVM
    /// IR. The conditions for using an `OperandRef` are as follows:
    ///
    /// - the type of the local must be judged "immediate" by `is_llvm_immediate`
    /// - the operand must never be referenced indirectly
    ///     - we should not take its address using the `&` operator
    ///     - nor should it appear in a place path like `tmp.a`
    /// - the operand must be defined by an rvalue that can generate immediate
    ///   values
    ///
    /// Avoiding allocs can also be important for certain intrinsics,
    /// notably `expect`.
    locals: IndexVec<mir::Local, LocalRef<'tcx, Bx::Value>>,
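    // Illustrative example: in `fn f(x: u32) -> u32 { let y = x + 1; y }`,
    // the local `y` meets all of the conditions above, so it can be kept as
    // a `LocalRef::Operand` with no alloca; adding `let r = &y;` to the body
    // would force it into a memory-backed `LocalRef::Place` instead.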

    /// All `VarDebugInfo` from the MIR body, partitioned by `Local`.
    /// This is `None` if no variable debuginfo/names are needed.
    per_local_var_debug_info:
        Option<IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, Bx::DIVariable>>>>,

    /// Caller location propagated if this function has `#[track_caller]`.
    caller_location: Option<OperandRef<'tcx, Bx::Value>>,
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
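    /// Substitutes this function's generic parameters into `value` and
    /// normalizes away projections and regions, so that codegen only ever
    /// works with fully concrete, monomorphic types.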
    pub fn monomorphize<T>(&self, value: T) -> T
    where
        T: Copy + TypeFoldable<'tcx>,
    {
        debug!("monomorphize: self.instance={:?}", self.instance);
        self.instance.subst_mir_and_normalize_erasing_regions(
            self.cx.tcx(),
            ty::ParamEnv::reveal_all(),
            value,
        )
    }
}

enum LocalRef<'tcx, V> {
    Place(PlaceRef<'tcx, V>),
    /// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place).
    /// `*p` is the fat pointer that references the actual unsized place.
    /// Every time it is initialized, we have to reallocate the place
    /// and update the fat pointer. That's the reason why it is indirect.
    UnsizedPlace(PlaceRef<'tcx, V>),
    Operand(Option<OperandRef<'tcx, V>>),
}

impl<'a, 'tcx, V: CodegenObject> LocalRef<'tcx, V> {
    fn new_operand<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> LocalRef<'tcx, V> {
        if layout.is_zst() {
            // Zero-size temporaries aren't always initialized, which
            // doesn't matter because they don't contain data, but
            // we need something in the operand.
            LocalRef::Operand(Some(OperandRef::new_zst(bx, layout)))
        } else {
            LocalRef::Operand(None)
        }
    }
}

///////////////////////////////////////////////////////////////////////////
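
/// Codegens the body of `instance`: allocates its locals, then lowers every
/// reachable MIR basic block into target IR through the `Bx` builder.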
pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    instance: Instance<'tcx>,
) {
    assert!(!instance.substs.needs_infer());

    let llfn = cx.get_fn(instance);

    let mir = cx.tcx().instance_mir(instance.def);

    let fn_abi = FnAbi::of_instance(cx, instance, &[]);
    debug!("fn_abi: {:?}", fn_abi);

    let debug_context = cx.create_function_debug_context(instance, &fn_abi, llfn, &mir);

    let mut bx = Bx::new_block(cx, llfn, "start");

    if mir.basic_blocks().iter().any(|bb| bb.is_cleanup) {
        bx.set_personality_fn(cx.eh_personality());
    }
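
    // Classify each block's unwind role (not a cleanup block, head of a
    // funclet, or internal to one); MSVC-style EH uses this classification
    // to build funclets below.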
    let cleanup_kinds = analyze::cleanup_kinds(&mir);
    // Allocate a `Block` for every basic block, except
    // the start block, if nothing loops back to it.
    let reentrant_start_block = !mir.predecessors()[mir::START_BLOCK].is_empty();
    let block_bxs: IndexVec<mir::BasicBlock, Bx::BasicBlock> = mir
        .basic_blocks()
        .indices()
        .map(|bb| {
            if bb == mir::START_BLOCK && !reentrant_start_block {
                bx.llbb()
            } else {
                bx.build_sibling_block(&format!("{:?}", bb)).llbb()
            }
        })
        .collect();

    let (landing_pads, funclets) = create_funclets(&mir, &mut bx, &cleanup_kinds, &block_bxs);
    let mut fx = FunctionCx {
        instance,
        mir,
        llfn,
        fn_abi,
        cx,
        personality_slot: None,
        blocks: block_bxs,
        unreachable_block: None,
        cleanup_kinds,
        landing_pads,
        funclets,
        locals: IndexVec::new(),
        debug_context,
        per_local_var_debug_info: None,
        caller_location: None,
    };

    fx.per_local_var_debug_info = fx.compute_per_local_var_debug_info(&mut bx);

    // Evaluate all required consts; codegen later assumes that CTFE will never fail.
    let mut all_consts_ok = true;
    for const_ in &mir.required_consts {
        if let Err(err) = fx.eval_mir_constant(const_) {
            all_consts_ok = false;
            match err {
                // errored or at least linted
                ErrorHandled::Reported(ErrorReported) | ErrorHandled::Linted => {}
                ErrorHandled::TooGeneric => {
                    span_bug!(const_.span, "codegen encountered polymorphic constant: {:?}", err)
                }
            }
        }
    }
    if !all_consts_ok {
        // We leave the IR in some half-built state here, and rely on this code not even being
        // submitted to LLVM once an error was raised.
        return;
    }

    let memory_locals = analyze::non_ssa_locals(&fx);
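    // Locals in `memory_locals` must be backed by an alloca (they are
    // borrowed, unsized, or otherwise not representable as SSA values);
    // everything else can live as an `OperandRef`.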

    // Allocate variable and temp allocas
    fx.locals = {
        let args = arg_local_refs(&mut bx, &mut fx, &memory_locals);

        let mut allocate_local = |local| {
            let decl = &mir.local_decls[local];
            let layout = bx.layout_of(fx.monomorphize(decl.ty));
            assert!(!layout.ty.has_erasable_regions());

            if local == mir::RETURN_PLACE && fx.fn_abi.ret.is_indirect() {
                debug!("alloc: {:?} (return place) -> place", local);
                let llretptr = bx.get_param(0);
                return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
            }

            if memory_locals.contains(local) {
                debug!("alloc: {:?} -> place", local);
                if layout.is_unsized() {
                    LocalRef::UnsizedPlace(PlaceRef::alloca_unsized_indirect(&mut bx, layout))
                } else {
                    LocalRef::Place(PlaceRef::alloca(&mut bx, layout))
                }
            } else {
                debug!("alloc: {:?} -> operand", local);
                LocalRef::new_operand(&mut bx, layout)
            }
        };

        let retptr = allocate_local(mir::RETURN_PLACE);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };

    // Apply debuginfo to the newly allocated locals.
    fx.debug_introduce_locals(&mut bx);

    // Branch to the START block, if it's not the entry block.
    if reentrant_start_block {
        bx.br(fx.blocks[mir::START_BLOCK]);
    }

    let rpo = traversal::reverse_postorder(&mir);
    let mut visited = BitSet::new_empty(mir.basic_blocks().len());

    // Codegen the body of each block using reverse postorder
    for (bb, _) in rpo {
        visited.insert(bb.index());
        fx.codegen_block(bb);
    }

    // Remove blocks that haven't been visited, or have no
    // predecessors.
    for bb in mir.basic_blocks().indices() {
        // Unreachable block
        if !visited.contains(bb.index()) {
            debug!("codegen_mir: block {:?} was not visited", bb);
            unsafe {
                bx.delete_basic_block(fx.blocks[bb]);
            }
        }
    }
}
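
/// Builds, for each cleanup block, the landing-pad entry block and (on
/// MSVC, where funclet-based EH is used) the matching funclet pad; returns
/// `None` entries for blocks that need neither.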
fn create_funclets<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    mir: &'tcx mir::Body<'tcx>,
    bx: &mut Bx,
    cleanup_kinds: &IndexVec<mir::BasicBlock, CleanupKind>,
    block_bxs: &IndexVec<mir::BasicBlock, Bx::BasicBlock>,
) -> (
    IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,
    IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,
) {
    block_bxs
        .iter_enumerated()
        .zip(cleanup_kinds)
        .map(|((bb, &llbb), cleanup_kind)| {
            match *cleanup_kind {
                CleanupKind::Funclet if base::wants_msvc_seh(bx.sess()) => {}
                _ => return (None, None),
            }

            let funclet;
            let ret_llbb;
            match mir[bb].terminator.as_ref().map(|t| &t.kind) {
                // This is a basic block that we're aborting the program for,
                // notably in an `extern` function. These basic blocks are inserted
                // so that we assert that `extern` functions do indeed not panic,
                // and if they do we abort the process.
                //
                // On MSVC these are tricky though (where we're doing funclets). If
                // we were to do a cleanuppad (like below) the normal functions like
                // `longjmp` would trigger the abort logic, terminating the
                // program. Instead we insert the equivalent of `catch(...)` for C++
                // which magically doesn't trigger when `longjmp` flies over this
                // landing pad.
                //
                // Lots more discussion can be found on #48251 but this codegen is
                // modeled after clang's for:
                //
                //      try {
                //          foo();
                //      } catch (...) {
                //          bar();
                //      }
                Some(&mir::TerminatorKind::Abort) => {
                    let mut cs_bx = bx.build_sibling_block(&format!("cs_funclet{:?}", bb));
                    let mut cp_bx = bx.build_sibling_block(&format!("cp_funclet{:?}", bb));
                    ret_llbb = cs_bx.llbb();
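
                    // A single catch-switch with one handler: the catch-pad
                    // block created below.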
                    let cs = cs_bx.catch_switch(None, None, 1);
                    cs_bx.add_handler(cs, cp_bx.llbb());

                    // The "null" here is actually a RTTI type descriptor for the
                    // C++ personality function, but `catch (...)` has no type so
                    // it's null. The 64 here is actually a bitfield which
                    // represents that this is a catch-all block.
                    let null = bx.const_null(
                        bx.type_i8p_ext(bx.cx().data_layout().instruction_address_space),
                    );
                    let sixty_four = bx.const_i32(64);
                    funclet = cp_bx.catch_pad(cs, &[null, sixty_four, null]);
                    cp_bx.br(llbb);
                }
                _ => {
                    let mut cleanup_bx = bx.build_sibling_block(&format!("funclet_{:?}", bb));
                    ret_llbb = cleanup_bx.llbb();
                    funclet = cleanup_bx.cleanup_pad(None, &[]);
                    cleanup_bx.br(llbb);
                }
            };

            (Some(ret_llbb), Some(funclet))
        })
        .unzip()
}

/// Produces, for each argument, a `Value` pointing at the
/// argument's value. As arguments are places, these are always
/// indirect.
fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    fx: &mut FunctionCx<'a, 'tcx, Bx>,
    memory_locals: &BitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
    let mir = fx.mir;
    let mut idx = 0;
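    // When the return value is passed indirectly, LLVM parameter 0 is the
    // return-place pointer, so the first MIR argument starts at parameter 1.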
    let mut llarg_idx = fx.fn_abi.ret.is_indirect() as usize;

    let args = mir
        .args_iter()
        .enumerate()
        .map(|(arg_index, local)| {
            let arg_decl = &mir.local_decls[local];

            if Some(local) == mir.spread_arg {
                // This argument (e.g., the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual LLVM function arguments.
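                // Illustrative example: a closure invoked through
                // `Fn::call(&self, args: (u32, u32))` receives two scalar
                // arguments at the ABI level, while its MIR sees a single
                // tuple-typed local.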

                let arg_ty = fx.monomorphize(arg_decl.ty);
                let tupled_arg_tys = match arg_ty.kind() {
                    ty::Tuple(tys) => tys,
                    _ => bug!("spread argument isn't a tuple?!"),
                };

                let place = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                for i in 0..tupled_arg_tys.len() {
                    let arg = &fx.fn_abi.args[idx];
                    idx += 1;
                    if arg.pad.is_some() {
                        llarg_idx += 1;
                    }
                    let pr_field = place.project_field(bx, i);
                    bx.store_fn_arg(arg, &mut llarg_idx, pr_field);
                }

                return LocalRef::Place(place);
            }

            if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
                let arg_ty = fx.monomorphize(arg_decl.ty);

                let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                bx.va_start(va_list.llval);

                return LocalRef::Place(va_list);
            }

            let arg = &fx.fn_abi.args[idx];
            idx += 1;
            if arg.pad.is_some() {
                llarg_idx += 1;
            }

            if !memory_locals.contains(local) {
                // We don't have to cast or keep the argument in the alloca.
                // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
                // of putting everything in allocas just so we can use llvm.dbg.declare.
                let local = |op| LocalRef::Operand(Some(op));
                match arg.mode {
                    PassMode::Ignore => {
                        return local(OperandRef::new_zst(bx, arg.layout));
                    }
                    PassMode::Direct(_) => {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        return local(OperandRef::from_immediate_or_packed_pair(
                            bx, llarg, arg.layout,
                        ));
                    }
                    PassMode::Pair(..) => {
                        let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1));
                        llarg_idx += 2;

                        return local(OperandRef {
                            val: OperandValue::Pair(a, b),
                            layout: arg.layout,
                        });
                    }
                    _ => {}
                }
            }

            if arg.is_sized_indirect() {
                // Don't copy an indirect argument to an alloca, the caller
                // already put it in a temporary alloca and gave it up.
                // FIXME: lifetimes
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout))
            } else if arg.is_unsized_indirect() {
                // As the storage for the indirect argument lives during
                // the whole function call, we just copy the fat pointer.
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let llextra = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let indirect_operand = OperandValue::Pair(llarg, llextra);

                let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout);
                indirect_operand.store(bx, tmp);
                LocalRef::UnsizedPlace(tmp)
            } else {
                let tmp = PlaceRef::alloca(bx, arg.layout);
                bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                LocalRef::Place(tmp)
            }
        })
        .collect::<Vec<_>>();
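
    // `#[track_caller]` functions receive the caller's `Location` as one
    // extra ABI-level argument that has no corresponding MIR local.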
    if fx.instance.def.requires_caller_location(bx.tcx()) {
        assert_eq!(
            fx.fn_abi.args.len(),
            args.len() + 1,
            "#[track_caller] fn's must have 1 more argument in their ABI than in their MIR",
        );

        let arg = fx.fn_abi.args.last().unwrap();
        match arg.mode {
            PassMode::Direct(_) => (),
            _ => bug!("caller location must be PassMode::Direct, found {:?}", arg.mode),
        }

        fx.caller_location = Some(OperandRef {
            val: OperandValue::Immediate(bx.get_param(llarg_idx)),
            layout: arg.layout,
        });
    }

    args
}

mod analyze;
mod block;
pub mod constant;
pub mod coverageinfo;
pub mod debuginfo;
mod intrinsic;
pub mod operand;
pub mod place;
mod statement;