use crate::builder::Builder;
use crate::context::CodegenCx;
use crate::llvm::{self, AttributePlace};
use crate::type_::Type;
use crate::type_of::LayoutLlvmExt;
use crate::value::Value;

use rustc_codegen_ssa::mir::operand::OperandValue;
use rustc_codegen_ssa::mir::place::PlaceRef;
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::MemFlags;
use rustc_middle::bug;
pub use rustc_middle::ty::layout::{FAT_PTR_ADDR, FAT_PTR_EXTRA};
use rustc_middle::ty::Ty;
use rustc_target::abi::call::ArgAbi;
pub use rustc_target::abi::call::*;
use rustc_target::abi::{self, HasDataLayout, Int, LayoutOf};
pub use rustc_target::spec::abi::Abi;

use libc::c_uint;
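// Expands to one `contains` check per listed `ArgAttribute` flag, calling `$f` with the
// corresponding `llvm::Attribute` for every flag that is set.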
macro_rules! for_each_kind {
    ($flags: ident, $f: ident, $($kind: ident),+) => ({
        $(if $flags.contains(ArgAttribute::$kind) { $f(llvm::Attribute::$kind) })+
    });
}
trait ArgAttributeExt {
    fn for_each_kind<F>(&self, f: F)
    where
        F: FnMut(llvm::Attribute);
}

impl ArgAttributeExt for ArgAttribute {
    fn for_each_kind<F>(&self, mut f: F)
    where
        F: FnMut(llvm::Attribute),
    {
        for_each_kind!(self, f, NoAlias, NoCapture, NonNull, ReadOnly, InReg)
    }
}
pub trait ArgAttributesExt {
    fn apply_attrs_to_llfn(&self, idx: AttributePlace, llfn: &Value);
    fn apply_attrs_to_callsite(&self, idx: AttributePlace, callsite: &Value);
}
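// Both methods below map the target-independent `ArgAttributes` onto LLVM attributes; they
// differ only in whether the attributes land on a function declaration or on a single call site.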
impl ArgAttributesExt for ArgAttributes {
    fn apply_attrs_to_llfn(&self, idx: AttributePlace, llfn: &Value) {
        let mut regular = self.regular;
        unsafe {
            let deref = self.pointee_size.bytes();
            if deref != 0 {
                if regular.contains(ArgAttribute::NonNull) {
                    llvm::LLVMRustAddDereferenceableAttr(llfn, idx.as_uint(), deref);
                } else {
                    llvm::LLVMRustAddDereferenceableOrNullAttr(llfn, idx.as_uint(), deref);
                }
                regular -= ArgAttribute::NonNull;
            }
            if let Some(align) = self.pointee_align {
                llvm::LLVMRustAddAlignmentAttr(llfn, idx.as_uint(), align.bytes() as u32);
            }
            regular.for_each_kind(|attr| attr.apply_llfn(idx, llfn));
            match self.arg_ext {
                ArgExtension::None => {}
                ArgExtension::Zext => {
                    llvm::Attribute::ZExt.apply_llfn(idx, llfn);
                }
                ArgExtension::Sext => {
                    llvm::Attribute::SExt.apply_llfn(idx, llfn);
                }
            }
        }
    }

    fn apply_attrs_to_callsite(&self, idx: AttributePlace, callsite: &Value) {
        let mut regular = self.regular;
        unsafe {
            let deref = self.pointee_size.bytes();
            if deref != 0 {
                if regular.contains(ArgAttribute::NonNull) {
                    llvm::LLVMRustAddDereferenceableCallSiteAttr(callsite, idx.as_uint(), deref);
                } else {
                    llvm::LLVMRustAddDereferenceableOrNullCallSiteAttr(
                        callsite,
                        idx.as_uint(),
                        deref,
                    );
                }
                regular -= ArgAttribute::NonNull;
            }
            if let Some(align) = self.pointee_align {
                llvm::LLVMRustAddAlignmentCallSiteAttr(
                    callsite,
                    idx.as_uint(),
                    align.bytes() as u32,
                );
            }
            regular.for_each_kind(|attr| attr.apply_callsite(idx, callsite));
            match self.arg_ext {
                ArgExtension::None => {}
                ArgExtension::Zext => {
                    llvm::Attribute::ZExt.apply_callsite(idx, callsite);
                }
                ArgExtension::Sext => {
                    llvm::Attribute::SExt.apply_callsite(idx, callsite);
                }
            }
        }
    }
}
pub trait LlvmType {
    fn llvm_type(&self, cx: &CodegenCx<'ll, '_>) -> &'ll Type;
}

impl LlvmType for Reg {
    fn llvm_type(&self, cx: &CodegenCx<'ll, '_>) -> &'ll Type {
        match self.kind {
            RegKind::Integer => cx.type_ix(self.size.bits()),
            RegKind::Float => match self.size.bits() {
                32 => cx.type_f32(),
                64 => cx.type_f64(),
                _ => bug!("unsupported float: {:?}", self),
            },
            RegKind::Vector => cx.type_vector(cx.type_i8(), self.size.bytes()),
        }
    }
}
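// A `CastTarget` lowers to the unit type itself, to an array of units, or to an LLVM struct
// built from the prefix chunks, the repeated rest units, and a trailing integer covering any
// remainder that is not a whole unit.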
impl LlvmType for CastTarget {
    fn llvm_type(&self, cx: &CodegenCx<'ll, '_>) -> &'ll Type {
        let rest_ll_unit = self.rest.unit.llvm_type(cx);
        let (rest_count, rem_bytes) = if self.rest.unit.size.bytes() == 0 {
            (0, 0)
        } else {
            (
                self.rest.total.bytes() / self.rest.unit.size.bytes(),
                self.rest.total.bytes() % self.rest.unit.size.bytes(),
            )
        };

        if self.prefix.iter().all(|x| x.is_none()) {
            // Simplify to a single unit when there is no prefix and size <= unit size
            if self.rest.total <= self.rest.unit.size {
                return rest_ll_unit;
            }

            // Simplify to array when all chunks are the same size and type
            if rem_bytes == 0 {
                return cx.type_array(rest_ll_unit, rest_count);
            }
        }

        // Create list of fields in the main structure
        let mut args: Vec<_> = self
            .prefix
            .iter()
            .flat_map(|option_kind| {
                option_kind.map(|kind| Reg { kind, size: self.prefix_chunk_size }.llvm_type(cx))
            })
            .chain((0..rest_count).map(|_| rest_ll_unit))
            .collect();

        // Append final integer
        if rem_bytes != 0 {
            // Only integers can be really split further.
            assert_eq!(self.rest.unit.kind, RegKind::Integer);
            args.push(cx.type_ix(rem_bytes * 8));
        }

        cx.type_struct(&args, false)
    }
}
pub trait ArgAbiExt<'ll, 'tcx> {
    fn memory_ty(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type;
    fn store(
        &self,
        bx: &mut Builder<'_, 'll, 'tcx>,
        val: &'ll Value,
        dst: PlaceRef<'tcx, &'ll Value>,
    );
    fn store_fn_arg(
        &self,
        bx: &mut Builder<'_, 'll, 'tcx>,
        idx: &mut usize,
        dst: PlaceRef<'tcx, &'ll Value>,
    );
}
impl ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
    /// Gets the LLVM type for a place of the original Rust type of
    /// this argument/return, i.e., the result of `type_of::type_of`.
    fn memory_ty(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
        self.layout.llvm_type(cx)
    }
    /// Stores a direct/indirect value described by this ArgAbi into a
    /// place for the original Rust type of this argument/return.
    /// Can be used for both storing formal arguments into Rust variables
    /// or results of call/invoke instructions into their destinations.
    fn store(
        &self,
        bx: &mut Builder<'_, 'll, 'tcx>,
        val: &'ll Value,
        dst: PlaceRef<'tcx, &'ll Value>,
    ) {
        if self.is_ignore() {
            return;
        }
        if self.is_sized_indirect() {
            OperandValue::Ref(val, None, self.layout.align.abi).store(bx, dst)
        } else if self.is_unsized_indirect() {
            bug!("unsized `ArgAbi` must be handled through `store_fn_arg`");
        } else if let PassMode::Cast(cast) = self.mode {
            // FIXME(eddyb): Figure out when the simpler Store is safe, clang
            // uses it for i16 -> {i8, i8}, but not for i24 -> {i8, i8, i8}.
            let can_store_through_cast_ptr = false;
            if can_store_through_cast_ptr {
                let cast_ptr_llty = bx.type_ptr_to(cast.llvm_type(bx));
                let cast_dst = bx.pointercast(dst.llval, cast_ptr_llty);
                bx.store(val, cast_dst, self.layout.align.abi);
            } else {
                // The actual return type is a struct, but the ABI
                // adaptation code has cast it into some scalar type. The
                // code that follows is the only reliable way I have
                // found to do a transform like i64 -> {i32,i32}.
                // Basically we dump the data onto the stack then memcpy it.
                //
                // Other approaches I tried:
                // - Casting rust ret pointer to the foreign type and using Store
                //   is (a) unsafe if size of foreign type > size of rust type and
                //   (b) runs afoul of strict aliasing rules, yielding invalid
                //   assembly under -O (specifically, the store gets removed).
                // - Truncating foreign type to correct integral type and then
                //   bitcasting to the struct type yields invalid cast errors.

                // We instead thus allocate some scratch space...
                let scratch_size = cast.size(bx);
                let scratch_align = cast.align(bx);
                let llscratch = bx.alloca(cast.llvm_type(bx), scratch_align);
                bx.lifetime_start(llscratch, scratch_size);

                // ... where we first store the value...
                bx.store(val, llscratch, scratch_align);

                // ... and then memcpy it to the intended destination.
                bx.memcpy(
                    dst.llval,
                    self.layout.align.abi,
                    llscratch,
                    scratch_align,
                    bx.const_usize(self.layout.size.bytes()),
                    MemFlags::empty(),
                );

                bx.lifetime_end(llscratch, scratch_size);
            }
        } else {
            OperandValue::Immediate(val).store(bx, dst);
        }
    }
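    // Like `store`, but reads the value from the current function's LLVM parameters starting
    // at `*idx`, advancing `idx` past every parameter that this argument consumes.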
    fn store_fn_arg(
        &self,
        bx: &mut Builder<'a, 'll, 'tcx>,
        idx: &mut usize,
        dst: PlaceRef<'tcx, &'ll Value>,
    ) {
        let mut next = || {
            let val = llvm::get_param(bx.llfn(), *idx as c_uint);
            *idx += 1;
            val
        };
        match self.mode {
            PassMode::Ignore => {}
            PassMode::Pair(..) => {
                OperandValue::Pair(next(), next()).store(bx, dst);
            }
            PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
                OperandValue::Ref(next(), Some(next()), self.layout.align.abi).store(bx, dst);
            }
            PassMode::Direct(_)
            | PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ }
            | PassMode::Cast(_) => {
                let next_arg = next();
                self.store(bx, next_arg, dst);
            }
        }
    }
}
impl ArgAbiMethods<'tcx> for Builder<'a, 'll, 'tcx> {
    fn store_fn_arg(
        &mut self,
        arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        idx: &mut usize,
        dst: PlaceRef<'tcx, Self::Value>,
    ) {
        arg_abi.store_fn_arg(self, idx, dst)
    }
    fn store_arg(
        &mut self,
        arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
        val: &'ll Value,
        dst: PlaceRef<'tcx, &'ll Value>,
    ) {
        arg_abi.store(self, val, dst)
    }
    fn arg_memory_ty(&self, arg_abi: &ArgAbi<'tcx, Ty<'tcx>>) -> &'ll Type {
        arg_abi.memory_ty(self)
    }
}
pub trait FnAbiLlvmExt<'tcx> {
    fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type;
    fn ptr_to_llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type;
    fn llvm_cconv(&self) -> llvm::CallConv;
    fn apply_attrs_llfn(&self, cx: &CodegenCx<'ll, 'tcx>, llfn: &'ll Value);
    fn apply_attrs_callsite(&self, bx: &mut Builder<'a, 'll, 'tcx>, callsite: &'ll Value);
}
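// `llvm_type` assembles the LLVM signature for this `FnAbi`: an `sret` pointer comes first
// when the return value is passed indirectly, and each argument then contributes zero, one,
// or two LLVM parameters depending on its `PassMode`.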
impl<'tcx> FnAbiLlvmExt<'tcx> for FnAbi<'tcx, Ty<'tcx>> {
    fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
        let args_capacity: usize = self.args.iter().map(|arg|
            if arg.pad.is_some() { 1 } else { 0 } +
            if let PassMode::Pair(_, _) = arg.mode { 2 } else { 1 }
        ).sum();
        let mut llargument_tys = Vec::with_capacity(
            if let PassMode::Indirect { .. } = self.ret.mode { 1 } else { 0 } + args_capacity,
        );

        let llreturn_ty = match self.ret.mode {
            PassMode::Ignore => cx.type_void(),
            PassMode::Direct(_) | PassMode::Pair(..) => self.ret.layout.immediate_llvm_type(cx),
            PassMode::Cast(cast) => cast.llvm_type(cx),
            PassMode::Indirect { .. } => {
                llargument_tys.push(cx.type_ptr_to(self.ret.memory_ty(cx)));
                cx.type_void()
            }
        };

        for arg in &self.args {
            // add padding
            if let Some(ty) = arg.pad {
                llargument_tys.push(ty.llvm_type(cx));
            }

            let llarg_ty = match arg.mode {
                PassMode::Ignore => continue,
                PassMode::Direct(_) => arg.layout.immediate_llvm_type(cx),
                PassMode::Pair(..) => {
                    llargument_tys.push(arg.layout.scalar_pair_element_llvm_type(cx, 0, true));
                    llargument_tys.push(arg.layout.scalar_pair_element_llvm_type(cx, 1, true));
                    continue;
                }
                PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
                    let ptr_ty = cx.tcx.mk_mut_ptr(arg.layout.ty);
                    let ptr_layout = cx.layout_of(ptr_ty);
                    llargument_tys.push(ptr_layout.scalar_pair_element_llvm_type(cx, 0, true));
                    llargument_tys.push(ptr_layout.scalar_pair_element_llvm_type(cx, 1, true));
                    continue;
                }
                PassMode::Cast(cast) => cast.llvm_type(cx),
                PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {
                    cx.type_ptr_to(arg.memory_ty(cx))
                }
            };
            llargument_tys.push(llarg_ty);
        }

        if self.c_variadic {
            cx.type_variadic_func(&llargument_tys, llreturn_ty)
        } else {
            cx.type_func(&llargument_tys, llreturn_ty)
        }
    }
    fn ptr_to_llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
        unsafe {
            llvm::LLVMPointerType(
                self.llvm_type(cx),
                cx.data_layout().instruction_address_space.0 as c_uint,
            )
        }
    }
    fn llvm_cconv(&self) -> llvm::CallConv {
        match self.conv {
            Conv::C | Conv::Rust | Conv::CCmseNonSecureCall => llvm::CCallConv,
            Conv::AmdGpuKernel => llvm::AmdGpuKernel,
            Conv::AvrInterrupt => llvm::AvrInterrupt,
            Conv::AvrNonBlockingInterrupt => llvm::AvrNonBlockingInterrupt,
            Conv::ArmAapcs => llvm::ArmAapcsCallConv,
            Conv::Msp430Intr => llvm::Msp430Intr,
            Conv::PtxKernel => llvm::PtxKernel,
            Conv::X86Fastcall => llvm::X86FastcallCallConv,
            Conv::X86Intr => llvm::X86_Intr,
            Conv::X86Stdcall => llvm::X86StdcallCallConv,
            Conv::X86ThisCall => llvm::X86_ThisCall,
            Conv::X86VectorCall => llvm::X86_VectorCall,
            Conv::X86_64SysV => llvm::X86_64_SysV,
            Conv::X86_64Win64 => llvm::X86_64_Win64,
        }
    }
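    // The two methods below are symmetric: `apply_attrs_llfn` decorates a function
    // declaration/definition, while `apply_attrs_callsite` decorates an individual call or
    // invoke instruction. The `apply` closure returns the LLVM argument index it attributed.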
    fn apply_attrs_llfn(&self, cx: &CodegenCx<'ll, 'tcx>, llfn: &'ll Value) {
        // FIXME(eddyb) can this also be applied to callsites?
        if self.ret.layout.abi.is_uninhabited() {
            llvm::Attribute::NoReturn.apply_llfn(llvm::AttributePlace::Function, llfn);
        }

        // FIXME(eddyb, wesleywiser): apply this to callsites as well?
        if !self.can_unwind {
            llvm::Attribute::NoUnwind.apply_llfn(llvm::AttributePlace::Function, llfn);
        }

        let mut i = 0;
        let mut apply = |attrs: &ArgAttributes| {
            attrs.apply_attrs_to_llfn(llvm::AttributePlace::Argument(i), llfn);
            i += 1;
            i - 1
        };
        match self.ret.mode {
            PassMode::Direct(ref attrs) => {
                attrs.apply_attrs_to_llfn(llvm::AttributePlace::ReturnValue, llfn);
            }
            PassMode::Indirect { ref attrs, extra_attrs: _, on_stack } => {
                assert!(!on_stack);
                let i = apply(attrs);
                unsafe {
                    llvm::LLVMRustAddStructRetAttr(
                        llfn,
                        llvm::AttributePlace::Argument(i).as_uint(),
                        self.ret.layout.llvm_type(cx),
                    );
                }
            }
            _ => {}
        }
        for arg in &self.args {
            if arg.pad.is_some() {
                apply(&ArgAttributes::new());
            }
            match arg.mode {
                PassMode::Ignore => {}
                PassMode::Indirect { ref attrs, extra_attrs: None, on_stack: true } => {
                    let i = apply(attrs);
                    unsafe {
                        llvm::LLVMRustAddByValAttr(
                            llfn,
                            llvm::AttributePlace::Argument(i).as_uint(),
                            arg.layout.llvm_type(cx),
                        );
                    }
                }
                PassMode::Direct(ref attrs)
                | PassMode::Indirect { ref attrs, extra_attrs: None, on_stack: false } => {
                    apply(attrs);
                }
                PassMode::Indirect { ref attrs, extra_attrs: Some(ref extra_attrs), on_stack } => {
                    assert!(!on_stack);
                    apply(attrs);
                    apply(extra_attrs);
                }
                PassMode::Pair(ref a, ref b) => {
                    apply(a);
                    apply(b);
                }
                PassMode::Cast(_) => {
                    apply(&ArgAttributes::new());
                }
            }
        }
    }
    fn apply_attrs_callsite(&self, bx: &mut Builder<'a, 'll, 'tcx>, callsite: &'ll Value) {
        // FIXME(wesleywiser, eddyb): We should apply `nounwind` and `noreturn` as appropriate to this callsite.

        let mut i = 0;
        let mut apply = |attrs: &ArgAttributes| {
            attrs.apply_attrs_to_callsite(llvm::AttributePlace::Argument(i), callsite);
            i += 1;
            i - 1
        };
        match self.ret.mode {
            PassMode::Direct(ref attrs) => {
                attrs.apply_attrs_to_callsite(llvm::AttributePlace::ReturnValue, callsite);
            }
            PassMode::Indirect { ref attrs, extra_attrs: _, on_stack } => {
                assert!(!on_stack);
                let i = apply(attrs);
                unsafe {
                    llvm::LLVMRustAddStructRetCallSiteAttr(
                        callsite,
                        llvm::AttributePlace::Argument(i).as_uint(),
                        self.ret.layout.llvm_type(bx),
                    );
                }
            }
            _ => {}
        }
        if let abi::Abi::Scalar(ref scalar) = self.ret.layout.abi {
            // If the value is a boolean, the range is 0..2 and that ultimately
            // becomes 0..0 when the type becomes i1, which would be rejected
            // by the LLVM verifier.
            if let Int(..) = scalar.value {
                if !scalar.is_bool() {
                    let range = scalar.valid_range_exclusive(bx);
                    if range.start != range.end {
                        bx.range_metadata(callsite, range);
                    }
                }
            }
        }
        for arg in &self.args {
            if arg.pad.is_some() {
                apply(&ArgAttributes::new());
            }
            match arg.mode {
                PassMode::Ignore => {}
                PassMode::Indirect { ref attrs, extra_attrs: None, on_stack: true } => {
                    let i = apply(attrs);
                    unsafe {
                        llvm::LLVMRustAddByValCallSiteAttr(
                            callsite,
                            llvm::AttributePlace::Argument(i).as_uint(),
                            arg.layout.llvm_type(bx),
                        );
                    }
                }
                PassMode::Direct(ref attrs)
                | PassMode::Indirect { ref attrs, extra_attrs: None, on_stack: false } => {
                    apply(attrs);
                }
                PassMode::Indirect {
                    ref attrs,
                    extra_attrs: Some(ref extra_attrs),
                    on_stack: _,
                } => {
                    apply(attrs);
                    apply(extra_attrs);
                }
                PassMode::Pair(ref a, ref b) => {
                    apply(a);
                    apply(b);
                }
                PassMode::Cast(_) => {
                    apply(&ArgAttributes::new());
                }
            }
        }

        let cconv = self.llvm_cconv();
        if cconv != llvm::CCallConv {
            llvm::SetInstructionCallConv(callsite, cconv);
        }

        if self.conv == Conv::CCmseNonSecureCall {
            // This will probably get ignored on all targets but those supporting the TrustZone-M
            // extension (thumbv8m targets).
            unsafe {
                llvm::AddCallSiteAttrString(
                    callsite,
                    llvm::AttributePlace::Function,
                    cstr::cstr!("cmse_nonsecure_call"),
                );
            }
        }
    }
}
impl AbiBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
    fn apply_attrs_callsite(&mut self, fn_abi: &FnAbi<'tcx, Ty<'tcx>>, callsite: Self::Value) {
        fn_abi.apply_attrs_callsite(self, callsite)
    }

    fn get_param(&self, index: usize) -> Self::Value {
        llvm::get_param(self.llfn(), index as c_uint)
    }
}