// compiler/rustc_target/src/abi/call/x86_64.rs (upstream rustc 1.65.0)
//
// The classification code for the x86_64 ABI is taken from the clay language
// https://github.com/jckarter/clay/blob/master/compiler/src/externals.cpp
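//
// Background: the System V AMD64 ABI splits each argument into 8-byte
// "eightbytes" and classifies each one. `Class::Int` means "use a
// general-purpose register", `Class::Sse` means "use (the low 64 bits of) a
// vector register", and `Class::SseUp` marks eightbytes that occupy the upper
// lanes of the same vector register as a preceding `Sse` eightbyte. Values
// that cannot be passed in registers are classified as MEMORY, which this
// module represents by returning `Err(Memory)`.
//
// For example, `#[repr(C)] struct S { x: f64, y: i64 }` spans two eightbytes:
// the first classifies as Sse (the f64) and the second as Int (the i64), so
// `S` is passed in one SSE register plus one general-purpose register.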
use crate::abi::call::{ArgAbi, CastTarget, FnAbi, Reg, RegKind};
use crate::abi::{self, Abi, HasDataLayout, Size, TyAbiInterface, TyAndLayout};

/// Classification of "eightbyte" components.
// N.B., the order of the variants is from general to specific,
// such that `unify(a, b)` is the "smaller" of `a` and `b`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Class {
    Int,
    Sse,
    SseUp,
}

#[derive(Clone, Copy, Debug)]
struct Memory;

// Currently supported vector size (AVX-512).
const LARGEST_VECTOR_SIZE: usize = 512;
const MAX_EIGHTBYTES: usize = LARGEST_VECTOR_SIZE / 64;
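
// Classifies `arg` into per-eightbyte register classes. On success the array
// holds the class of each eightbyte the value occupies (`None` beyond its
// end); `Err(Memory)` means the value must be passed on the stack.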
fn classify_arg<'a, Ty, C>(
    cx: &C,
    arg: &ArgAbi<'a, Ty>,
) -> Result<[Option<Class>; MAX_EIGHTBYTES], Memory>
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout,
{
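    // Recursively classifies the piece of the value described by `layout`,
    // which lives at offset `off` inside the outermost argument, merging its
    // classes into the shared per-eightbyte `cls` array.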
    fn classify<'a, Ty, C>(
        cx: &C,
        layout: TyAndLayout<'a, Ty>,
        cls: &mut [Option<Class>],
        off: Size,
    ) -> Result<(), Memory>
    where
        Ty: TyAbiInterface<'a, C> + Copy,
        C: HasDataLayout,
    {
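        // A field whose offset is not a multiple of its ABI alignment (e.g.
        // a field of a packed struct) forces the whole value into memory;
        // zero-sized fields are exempt.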
        if !off.is_aligned(layout.align.abi) {
            if !layout.is_zst() {
                return Err(Memory);
            }
            return Ok(());
        }

        let mut c = match layout.abi {
            Abi::Uninhabited => return Ok(()),

            Abi::Scalar(scalar) => match scalar.primitive() {
                abi::Int(..) | abi::Pointer => Class::Int,
                abi::F32 | abi::F64 => Class::Sse,
            },

            Abi::Vector { .. } => Class::Sse,

            Abi::ScalarPair(..) | Abi::Aggregate { .. } => {
                for i in 0..layout.fields.count() {
                    let field_off = off + layout.fields.offset(i);
                    classify(cx, layout.field(cx, i), cls, field_off)?;
                }

                match &layout.variants {
                    abi::Variants::Single { .. } => {}
                    abi::Variants::Multiple { variants, .. } => {
                        // Treat enum variants like union members.
                        for variant_idx in variants.indices() {
                            classify(cx, layout.for_variant(cx, variant_idx), cls, off)?;
                        }
                    }
                }

                return Ok(());
            }
        };

        // Fill in `cls` for scalars (Int/Sse) and vectors (Sse).
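        // Where eightbytes overlap (enum variants are classified like union
        // members above), the "smaller" class wins: `Int` beats `Sse`, so an
        // eightbyte that holds an integer under any interpretation stays in a
        // general-purpose register.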
        let first = (off.bytes() / 8) as usize;
        let last = ((off.bytes() + layout.size.bytes() - 1) / 8) as usize;
        for cls in &mut cls[first..=last] {
            *cls = Some(cls.map_or(c, |old| old.min(c)));

            // Everything after the first Sse "eightbyte"
            // component is the upper half of a register.
            if c == Class::Sse {
                c = Class::SseUp;
            }
        }

        Ok(())
    }

    let n = ((arg.layout.size.bytes() + 7) / 8) as usize;
    if n > MAX_EIGHTBYTES {
        return Err(Memory);
    }

    let mut cls = [None; MAX_EIGHTBYTES];
    classify(cx, arg.layout, &mut cls, Size::ZERO)?;
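    // Values spanning more than two eightbytes may only stay in registers if
    // they form a single vector: the first eightbyte must be Sse and all the
    // following ones SseUp; anything else is sent to memory.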
    if n > 2 {
        if cls[0] != Some(Class::Sse) {
            return Err(Memory);
        }
        if cls[1..n].iter().any(|&c| c != Some(Class::SseUp)) {
            return Err(Memory);
        }
    } else {
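        // With at most two eightbytes, fix up stray `SseUp`s: an `SseUp` that
        // does not follow an `Sse` (directly or through a run of `SseUp`s) is
        // downgraded to `Sse`.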
        let mut i = 0;
        while i < n {
            if cls[i] == Some(Class::SseUp) {
                cls[i] = Some(Class::Sse);
            } else if cls[i] == Some(Class::Sse) {
                i += 1;
                while i != n && cls[i] == Some(Class::SseUp) {
                    i += 1;
                }
            } else {
                i += 1;
            }
        }
    }

    Ok(cls)
}
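
// reg_component() returns the register covering the component that starts at
// eightbyte `*i` (or `None` for an unclassified eightbyte) and advances `*i`
// past it. An `Int` component with less than 8 bytes remaining uses an
// integer register of exactly that size; an `Sse` eightbyte followed by
// `SseUp`s becomes a single vector register.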
fn reg_component(cls: &[Option<Class>], i: &mut usize, size: Size) -> Option<Reg> {
    if *i >= cls.len() {
        return None;
    }

    match cls[*i] {
        None => None,
        Some(Class::Int) => {
            *i += 1;
            Some(if size.bytes() < 8 { Reg { kind: RegKind::Integer, size } } else { Reg::i64() })
        }
        Some(Class::Sse) => {
            let vec_len =
                1 + cls[*i + 1..].iter().take_while(|&&c| c == Some(Class::SseUp)).count();
            *i += vec_len;
            Some(if vec_len == 1 {
                match size.bytes() {
                    4 => Reg::f32(),
                    _ => Reg::f64(),
                }
            } else {
                Reg { kind: RegKind::Vector, size: Size::from_bytes(8) * (vec_len as u64) }
            })
        }
        Some(c) => unreachable!("reg_component: unhandled class {:?}", c),
    }
}
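
// cast_target() assembles the cast used to pass a register-classified
// aggregate: the low register component plus, when the value extends past the
// eightbytes covered by the low component, a high register component.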
fn cast_target(cls: &[Option<Class>], size: Size) -> CastTarget {
    let mut i = 0;
    let lo = reg_component(cls, &mut i, size).unwrap();
    let offset = Size::from_bytes(8) * (i as u64);
    let mut target = CastTarget::from(lo);
    if size > offset {
        if let Some(hi) = reg_component(cls, &mut i, size - offset) {
            target = CastTarget::pair(lo, hi);
        }
    }
    assert_eq!(reg_component(cls, &mut i, Size::ZERO), None);
    target
}

const MAX_INT_REGS: usize = 6; // RDI, RSI, RDX, RCX, R8, R9
const MAX_SSE_REGS: usize = 8; // XMM0-7
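
// Decides, for the return value and every argument of `fn_abi`, whether it
// goes in registers (possibly through a cast), is passed indirectly in
// memory, or, for small scalars, is extended to at least 32 bits, while
// tracking how many integer and SSE argument registers remain available.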
pub fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>)
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout,
{
    let mut int_regs = MAX_INT_REGS;
    let mut sse_regs = MAX_SSE_REGS;

    let mut x86_64_arg_or_ret = |arg: &mut ArgAbi<'a, Ty>, is_arg: bool| {
        let mut cls_or_mem = classify_arg(cx, arg);
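
        // Only arguments draw from the register budget here; the return value
        // is handled separately (it reserves an integer register only when it
        // has to be returned through an `sret` pointer, below).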
        if is_arg {
            if let Ok(cls) = cls_or_mem {
                let mut needed_int = 0;
                let mut needed_sse = 0;
                for c in cls {
                    match c {
                        Some(Class::Int) => needed_int += 1,
                        Some(Class::Sse) => needed_sse += 1,
                        _ => {}
                    }
                }
                match (int_regs.checked_sub(needed_int), sse_regs.checked_sub(needed_sse)) {
                    (Some(left_int), Some(left_sse)) => {
                        int_regs = left_int;
                        sse_regs = left_sse;
                    }
                    _ => {
                        // Not enough registers for this argument, so it will be
                        // passed on the stack, but we only mark aggregates
                        // explicitly as indirect `byval` arguments, as LLVM will
                        // automatically put immediates on the stack itself.
                        if arg.layout.is_aggregate() {
                            cls_or_mem = Err(Memory);
                        }
                    }
                }
            }
        }

        match cls_or_mem {
            Err(Memory) => {
                if is_arg {
                    arg.make_indirect_byval();
                } else {
                    // An `sret` parameter means one less integer register is available.
                    arg.make_indirect();
                    // NOTE(eddyb) return is handled first, so no registers
                    // should've been used yet.
                    assert_eq!(int_regs, MAX_INT_REGS);
                    int_regs -= 1;
                }
            }
            Ok(ref cls) => {
                // split into sized chunks passed individually
                if arg.layout.is_aggregate() {
                    let size = arg.layout.size;
                    arg.cast_to(cast_target(cls, size))
                } else {
                    arg.extend_integer_width_to(32);
                }
            }
        }
    };
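
    // The return value is classified before any argument so that, when it is
    // returned indirectly, the hidden `sret` pointer can claim its integer
    // register (RDI) before argument registers are handed out.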
    if !fn_abi.ret.is_ignore() {
        x86_64_arg_or_ret(&mut fn_abi.ret, false);
    }

    for arg in fn_abi.args.iter_mut() {
        if arg.is_ignore() {
            continue;
        }
        x86_64_arg_or_ret(arg, true);
    }
}