]>
Commit | Line | Data |
---|---|---|
9ffffee4 | 1 | use crate::base; |
9fa01778 | 2 | use crate::traits::*; |
fe692bf9 FG |
3 | use rustc_index::bit_set::BitSet; |
4 | use rustc_index::IndexVec; | |
ba9703b0 | 5 | use rustc_middle::mir; |
fe692bf9 | 6 | use rustc_middle::mir::traversal; |
781aab86 | 7 | use rustc_middle::mir::UnwindTerminateReason; |
c295e0f8 | 8 | use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, TyAndLayout}; |
9ffffee4 | 9 | use rustc_middle::ty::{self, Instance, Ty, TyCtxt, TypeFoldable, TypeVisitableExt}; |
dfeec247 | 10 | use rustc_target::abi::call::{FnAbi, PassMode}; |
a7813a04 | 11 | |
5bcae85e | 12 | use std::iter; |
54a0048b | 13 | |
fe692bf9 FG |
14 | mod analyze; |
15 | mod block; | |
16 | pub mod constant; | |
17 | pub mod coverageinfo; | |
18 | pub mod debuginfo; | |
19 | mod intrinsic; | |
20 | mod locals; | |
21 | pub mod operand; | |
22 | pub mod place; | |
23 | mod rvalue; | |
24 | mod statement; | |
54a0048b | 25 | |
74b04a01 | 26 | use self::debuginfo::{FunctionDebugContext, PerLocalVarDebugInfo}; |
a7813a04 | 27 | use self::operand::{OperandRef, OperandValue}; |
fe692bf9 | 28 | use self::place::PlaceRef; |
92a42be0 | 29 | |
// Used for tracking the state of generated basic blocks.
// Three-way state (rather than `Option`) so a block can be marked as
// deliberately not-to-be-created (`Skip`) and not re-attempted later.
enum CachedLlbb<T> {
    /// Nothing created yet.
    None,

    /// Has been created.
    Some(T),

    /// Nothing created yet, and nothing should be.
    Skip,
}
/// Master context for codegenning from MIR.
///
/// Holds all per-function state needed while lowering one monomorphized
/// instance's MIR body to backend IR via the `Bx: BuilderMethods` abstraction.
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    // The monomorphized instance being codegenned.
    instance: Instance<'tcx>,

    // The MIR body for `instance` (see `codegen_mir`, which fetches it
    // via `tcx.instance_mir`).
    mir: &'tcx mir::Body<'tcx>,

    // `None` when no debuginfo is being emitted for this function.
    debug_context: Option<FunctionDebugContext<'tcx, Bx::DIScope, Bx::DILocation>>,

    // The backend function value being populated.
    llfn: Bx::Function,

    // The per-codegen-unit backend context.
    cx: &'a Bx::CodegenCx,

    // The computed call ABI for this function (argument/return passing modes).
    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,

    /// When unwinding is initiated, we have to store this personality
    /// value somewhere so that we can load it and re-use it in the
    /// resume instruction. The personality is (afaik) some kind of
    /// value used for C++ unwinding, which must filter by type: we
    /// don't really care about it very much. Anyway, this value
    /// contains an alloca into which the personality is stored and
    /// then later loaded when generating the DIVERGE_BLOCK.
    personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,

    /// A backend `BasicBlock` for each MIR `BasicBlock`, created lazily
    /// as-needed (e.g. RPO reaching it or another block branching to it).
    // FIXME(eddyb) rename `llbbs` and other `ll`-prefixed things to use a
    // more backend-agnostic prefix such as `cg` (i.e. this would be `cgbbs`).
    cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>>,

    /// The funclet status of each basic block.
    /// `None` when the target does not use the new EH instructions
    /// (see the `wants_new_eh_instructions` gate in `codegen_mir`).
    cleanup_kinds: Option<IndexVec<mir::BasicBlock, analyze::CleanupKind>>,

    /// When targeting MSVC, this stores the cleanup info for each funclet BB.
    /// This is initialized at the same time as the `landing_pads` entry for the
    /// funclets' head block, i.e. when needed by an unwind / `cleanup_ret` edge.
    funclets: IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,

    /// This stores the cached landing/cleanup pad block for a given BB.
    // FIXME(eddyb) rename this to `eh_pads`.
    landing_pads: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

    /// Cached unreachable block
    unreachable_block: Option<Bx::BasicBlock>,

    /// Cached terminate upon unwinding block and its reason
    terminate_block: Option<(Bx::BasicBlock, UnwindTerminateReason)>,

    /// The location where each MIR arg/var/tmp/ret is stored. This is
    /// usually an `PlaceRef` representing an alloca, but not always:
    /// sometimes we can skip the alloca and just store the value
    /// directly using an `OperandRef`, which makes for tighter LLVM
    /// IR. The conditions for using an `OperandRef` are as follows:
    ///
    /// - the type of the local must be judged "immediate" by `is_llvm_immediate`
    /// - the operand must never be referenced indirectly
    ///     - we should not take its address using the `&` operator
    ///     - nor should it appear in a place path like `tmp.a`
    /// - the operand must be defined by an rvalue that can generate immediate
    ///   values
    ///
    /// Avoiding allocs can also be important for certain intrinsics,
    /// notably `expect`.
    locals: locals::Locals<'tcx, Bx::Value>,

    /// All `VarDebugInfo` from the MIR body, partitioned by `Local`.
    /// This is `None` if no variable debuginfo/names are needed.
    per_local_var_debug_info:
        Option<IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, Bx::DIVariable>>>>,

    /// Caller location propagated if this function has `#[track_caller]`.
    caller_location: Option<OperandRef<'tcx, Bx::Value>>,
}
impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    /// Substitutes this instance's generic arguments into `value` and
    /// normalizes it, erasing regions. Codegen works exclusively on fully
    /// monomorphic values, so anything taken from the (possibly generic)
    /// MIR body must pass through here first.
    pub fn monomorphize<T>(&self, value: T) -> T
    where
        T: Copy + TypeFoldable<TyCtxt<'tcx>>,
    {
        debug!("monomorphize: self.instance={:?}", self.instance);
        self.instance.instantiate_mir_and_normalize_erasing_regions(
            self.cx.tcx(),
            // Codegen happens post-typeck, so all opaque types etc. are revealed.
            ty::ParamEnv::reveal_all(),
            ty::EarlyBinder::bind(value),
        )
    }
}
/// How a single MIR local is represented during codegen: either backed by
/// memory (a place) or kept as an SSA-like operand value.
enum LocalRef<'tcx, V> {
    /// Backed by an alloca (or the indirect return pointer).
    Place(PlaceRef<'tcx, V>),
    /// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place).
    /// `*p` is the fat pointer that references the actual unsized place.
    /// Every time it is initialized, we have to reallocate the place
    /// and update the fat pointer. That's the reason why it is indirect.
    UnsizedPlace(PlaceRef<'tcx, V>),
    /// The backend [`OperandValue`] has already been generated.
    Operand(OperandRef<'tcx, V>),
    /// Will be a `Self::Operand` once we get to its definition.
    PendingOperand,
}
fe692bf9 FG |
142 | impl<'tcx, V: CodegenObject> LocalRef<'tcx, V> { |
143 | fn new_operand(layout: TyAndLayout<'tcx>) -> LocalRef<'tcx, V> { | |
ff7c6d11 | 144 | if layout.is_zst() { |
a7813a04 XL |
145 | // Zero-size temporaries aren't always initialized, which |
146 | // doesn't matter because they don't contain data, but | |
781aab86 | 147 | // we need something sufficiently aligned in the operand. |
fe692bf9 | 148 | LocalRef::Operand(OperandRef::zero_sized(layout)) |
a7813a04 | 149 | } else { |
353b0b11 | 150 | LocalRef::PendingOperand |
a7813a04 XL |
151 | } |
152 | } | |
153 | } | |
154 | ||
///////////////////////////////////////////////////////////////////////////

/// Entry point: lowers the MIR body of `instance` into backend IR inside
/// the function value obtained from `cx.get_fn(instance)`.
#[instrument(level = "debug", skip(cx))]
pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    instance: Instance<'tcx>,
) {
    // Codegen requires a fully monomorphic instance.
    assert!(!instance.args.has_infer());

    let llfn = cx.get_fn(instance);

    let mir = cx.tcx().instance_mir(instance.def);

    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
    debug!("fn_abi: {:?}", fn_abi);

    let debug_context = cx.create_function_debug_context(instance, &fn_abi, llfn, &mir);

    let start_llbb = Bx::append_block(cx, llfn, "start");
    let mut start_bx = Bx::build(cx, start_llbb);

    // Install a personality function if anything in this body can unwind
    // (cleanup blocks, or a terminate-on-unwind edge).
    if mir.basic_blocks.iter().any(|bb| {
        bb.is_cleanup || matches!(bb.terminator().unwind(), Some(mir::UnwindAction::Terminate(_)))
    }) {
        start_bx.set_personality_fn(cx.eh_personality());
    }

    // Only computed when the target uses the new EH instructions;
    // `None` otherwise (matches the `Option` in `FunctionCx::cleanup_kinds`).
    let cleanup_kinds =
        base::wants_new_eh_instructions(cx.tcx().sess).then(|| analyze::cleanup_kinds(&mir));

    // Pre-seed the cache with the already-created start block; every other
    // MIR block's backend block is created lazily on first use.
    let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> =
        mir.basic_blocks
            .indices()
            .map(|bb| {
                if bb == mir::START_BLOCK { CachedLlbb::Some(start_llbb) } else { CachedLlbb::None }
            })
            .collect();

    let mut fx = FunctionCx {
        instance,
        mir,
        llfn,
        fn_abi,
        cx,
        personality_slot: None,
        cached_llbbs,
        unreachable_block: None,
        terminate_block: None,
        cleanup_kinds,
        landing_pads: IndexVec::from_elem(None, &mir.basic_blocks),
        funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks.len()),
        locals: locals::Locals::empty(),
        debug_context,
        per_local_var_debug_info: None,
        caller_location: None,
    };

    fx.per_local_var_debug_info = fx.compute_per_local_var_debug_info(&mut start_bx);

    // Rust post-monomorphization checks; we later rely on them.
    if let Err(err) =
        mir.post_mono_checks(cx.tcx(), ty::ParamEnv::reveal_all(), |c| Ok(fx.monomorphize(c)))
    {
        err.emit_err(cx.tcx());
        // This IR shouldn't ever be emitted, but let's try to guard against any of this code
        // ever running.
        start_bx.abort();
        return;
    }

    // Locals that must live in memory (addressed, unsized, etc.);
    // everything else can stay as an SSA-like operand.
    let memory_locals = analyze::non_ssa_locals(&fx);

    // Allocate variable and temp allocas
    let local_values = {
        let args = arg_local_refs(&mut start_bx, &mut fx, &memory_locals);

        let mut allocate_local = |local| {
            let decl = &mir.local_decls[local];
            let layout = start_bx.layout_of(fx.monomorphize(decl.ty));
            assert!(!layout.ty.has_erasable_regions());

            // An indirect return writes through the caller-provided pointer
            // (ABI param 0) rather than a fresh alloca.
            if local == mir::RETURN_PLACE && fx.fn_abi.ret.is_indirect() {
                debug!("alloc: {:?} (return place) -> place", local);
                let llretptr = start_bx.get_param(0);
                return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
            }

            if memory_locals.contains(local) {
                debug!("alloc: {:?} -> place", local);
                if layout.is_unsized() {
                    LocalRef::UnsizedPlace(PlaceRef::alloca_unsized_indirect(&mut start_bx, layout))
                } else {
                    LocalRef::Place(PlaceRef::alloca(&mut start_bx, layout))
                }
            } else {
                debug!("alloc: {:?} -> operand", local);
                LocalRef::new_operand(layout)
            }
        };

        // Locals are ordered: return place, then args, then vars/temps.
        let retptr = allocate_local(mir::RETURN_PLACE);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };
    fx.initialize_locals(local_values);

    // Apply debuginfo to the newly allocated locals.
    fx.debug_introduce_locals(&mut start_bx);

    // The builders will be created separately for each basic block at `codegen_block`.
    // So drop the builder of `start_llbb` to avoid having two at the same time.
    drop(start_bx);

    // Codegen the body of each block using reverse postorder
    for (bb, _) in traversal::reverse_postorder(&mir) {
        fx.codegen_block(bb);
    }
}
/// Produces, for each argument, a `Value` pointing at the
/// argument's value. As arguments are places, these are always
/// indirect.
///
/// Also handles the "rust-call" spread argument (reassembling a tuple from
/// individual ABI arguments), C-variadic `va_list` setup, and capturing the
/// implicit `#[track_caller]` location argument into `fx.caller_location`.
///
/// NOTE(review): `idx` walks `fx.fn_abi.args` while `llarg_idx` walks the
/// backend function's parameters; they diverge because of padding, pairs,
/// and the indirect-return slot. The increment order below is load-bearing.
fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    fx: &mut FunctionCx<'a, 'tcx, Bx>,
    memory_locals: &BitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
    let mir = fx.mir;
    let mut idx = 0;
    // An indirect return occupies backend param 0, shifting all args by one.
    let mut llarg_idx = fx.fn_abi.ret.is_indirect() as usize;

    // Set iff a spread ("rust-call") tuple argument was expanded; holds the
    // tuple's arity for the #[track_caller] arg-count check below.
    let mut num_untupled = None;

    let args = mir
        .args_iter()
        .enumerate()
        .map(|(arg_index, local)| {
            let arg_decl = &mir.local_decls[local];
            let arg_ty = fx.monomorphize(arg_decl.ty);

            if Some(local) == mir.spread_arg {
                // This argument (e.g., the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual LLVM function arguments.
                let ty::Tuple(tupled_arg_tys) = arg_ty.kind() else {
                    bug!("spread argument isn't a tuple?!");
                };

                let layout = bx.layout_of(arg_ty);

                // FIXME: support unsized params in "rust-call" ABI
                if layout.is_unsized() {
                    span_bug!(
                        arg_decl.source_info.span,
                        "\"rust-call\" ABI does not support unsized params",
                    );
                }

                let place = PlaceRef::alloca(bx, layout);
                for i in 0..tupled_arg_tys.len() {
                    let arg = &fx.fn_abi.args[idx];
                    idx += 1;
                    // A cast with i32 padding consumes an extra backend param.
                    if let PassMode::Cast { pad_i32: true, .. } = arg.mode {
                        llarg_idx += 1;
                    }
                    let pr_field = place.project_field(bx, i);
                    bx.store_fn_arg(arg, &mut llarg_idx, pr_field);
                }
                assert_eq!(
                    None,
                    num_untupled.replace(tupled_arg_tys.len()),
                    "Replaced existing num_tupled"
                );

                return LocalRef::Place(place);
            }

            // The trailing C-variadic pseudo-argument gets a `va_list` alloca.
            if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
                let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                bx.va_start(va_list.llval);

                return LocalRef::Place(va_list);
            }

            let arg = &fx.fn_abi.args[idx];
            idx += 1;
            if let PassMode::Cast { pad_i32: true, .. } = arg.mode {
                llarg_idx += 1;
            }

            if !memory_locals.contains(local) {
                // We don't have to cast or keep the argument in the alloca.
                // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
                // of putting everything in allocas just so we can use llvm.dbg.declare.
                let local = |op| LocalRef::Operand(op);
                match arg.mode {
                    PassMode::Ignore => {
                        return local(OperandRef::zero_sized(arg.layout));
                    }
                    PassMode::Direct(_) => {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        return local(OperandRef::from_immediate_or_packed_pair(
                            bx, llarg, arg.layout,
                        ));
                    }
                    PassMode::Pair(..) => {
                        // Scalar pairs are split across two backend params.
                        let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1));
                        llarg_idx += 2;

                        return local(OperandRef {
                            val: OperandValue::Pair(a, b),
                            layout: arg.layout,
                        });
                    }
                    // Indirect/cast modes fall through to the memory path below.
                    _ => {}
                }
            }

            if arg.is_sized_indirect() {
                // Don't copy an indirect argument to an alloca, the caller
                // already put it in a temporary alloca and gave it up.
                // FIXME: lifetimes
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout))
            } else if arg.is_unsized_indirect() {
                // As the storage for the indirect argument lives during
                // the whole function call, we just copy the fat pointer.
                let llarg = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let llextra = bx.get_param(llarg_idx);
                llarg_idx += 1;
                let indirect_operand = OperandValue::Pair(llarg, llextra);

                let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout);
                indirect_operand.store(bx, tmp);
                LocalRef::UnsizedPlace(tmp)
            } else {
                // Memory-resident by-value argument: spill into a fresh alloca.
                let tmp = PlaceRef::alloca(bx, arg.layout);
                bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                LocalRef::Place(tmp)
            }
        })
        .collect::<Vec<_>>();

    if fx.instance.def.requires_caller_location(bx.tcx()) {
        let mir_args = if let Some(num_untupled) = num_untupled {
            // Subtract off the tupled argument that gets 'expanded'
            args.len() - 1 + num_untupled
        } else {
            args.len()
        };
        assert_eq!(
            fx.fn_abi.args.len(),
            mir_args + 1,
            "#[track_caller] instance {:?} must have 1 more argument in their ABI than in their MIR",
            fx.instance
        );

        // The caller location is the implicit final ABI argument.
        let arg = fx.fn_abi.args.last().unwrap();
        match arg.mode {
            PassMode::Direct(_) => (),
            _ => bug!("caller location must be PassMode::Direct, found {:?}", arg.mode),
        }

        fx.caller_location = Some(OperandRef {
            val: OperandValue::Immediate(bx.get_param(llarg_idx)),
            layout: arg.layout,
        });
    }

    args
}