1 use super::{InlineAsmArch, InlineAsmType}
;
2 use crate::spec
::{RelocModel, Target}
;
3 use rustc_data_structures
::stable_set
::FxHashSet
;
4 use rustc_macros
::HashStable_Generic
;
5 use rustc_span
::Symbol
;
// Fragment of a `def_reg_class!`-style macro invocation declaring the
// `X86InlineAsmRegClass` enum for the X86 architecture family.
// NOTE(review): the macro name and the list of class variants (reg, reg_abcd,
// reg_byte, xmm_reg, ymm_reg, zmm_reg, kreg, kreg0, mmx_reg, x87_reg, tmm_reg —
// inferred from the `Self::…` arms below) are missing from this extraction;
// confirm against the full file.
9 X86 X86InlineAsmRegClass
{
// Behavior of each X86 inline-asm register class: which template modifiers it
// accepts, what class/modifier to suggest, and which value types it supports.
24 impl X86InlineAsmRegClass
{
// Returns the set of template modifier characters valid for this register
// class. The visible arms show that the general-purpose classes check
// `arch == InlineAsmArch::X86_64` (the 'r' 64-bit modifier only exists there),
// reg_byte / kreg / kreg0 / mmx_reg / x87_reg accept no modifiers, and the
// vector classes accept 'x' / 'y' / 'z' (XMM / YMM / ZMM views).
// NOTE(review): several interior lines of this function (original lines
// 26-34, 37-40, 45-47) are missing from this extraction — the match header,
// the non-x86_64 modifier lists, and closing braces must be confirmed
// against the full file.
25 pub fn valid_modifiers(self, arch
: super::InlineAsmArch
) -> &'
static [char] {
28 if arch
== InlineAsmArch
::X86_64
{
35 if arch
== InlineAsmArch
::X86_64
{
// 'l'/'h' = low/high byte, 'x' = 16-bit, 'e' = 32-bit, 'r' = 64-bit views.
36 &['l'
, 'h'
, 'x'
, 'e'
, 'r'
]
41 Self::reg_byte
=> &[],
42 Self::xmm_reg
| Self::ymm_reg
| Self::zmm_reg
=> &['x'
, 'y'
, 'z'
],
43 Self::kreg
| Self::kreg0
=> &[],
44 Self::mmx_reg
| Self::x87_reg
=> &[],
// Suggests a better-fitting register class for the given operand type.
// The visible arm maps an 8-bit value in a general-purpose class
// (reg / reg_abcd) to reg_byte. NOTE(review): the match header, any other
// arms, and the closing braces (original lines 50, 52-54) are missing from
// this extraction.
49 pub fn suggest_class(self, _arch
: InlineAsmArch
, ty
: InlineAsmType
) -> Option
<Self> {
51 Self::reg
| Self::reg_abcd
if ty
.size().bits() == 8 => Some(Self::reg_byte
),
// Suggests a template modifier (and an example register name for the
// diagnostic) when the operand type is narrower than the full register:
// e.g. a 16-bit value in `reg` suggests 'x' ("ax"), a 32-bit value on
// x86_64 suggests 'e' ("eax"), and sub-width vectors in ymm/zmm suggest the
// narrower 'x'/'y' views. NOTE(review): the parameter list (original lines
// 57-59), the match header, and several arms/braces are missing from this
// extraction — in particular the 64-bit / full-width cases that presumably
// return None.
56 pub fn suggest_modifier(
60 ) -> Option
<(char, &'
static str)> {
62 Self::reg
=> match ty
.size().bits() {
63 16 => Some(('x'
, "ax")),
64 32 if arch
== InlineAsmArch
::X86_64
=> Some(('e'
, "eax")),
67 Self::reg_abcd
=> match ty
.size().bits() {
68 16 => Some(('x'
, "ax")),
69 32 if arch
== InlineAsmArch
::X86_64
=> Some(('e'
, "eax")),
72 Self::reg_byte
=> None
,
73 Self::xmm_reg
=> None
,
74 Self::ymm_reg
=> match ty
.size().bits() {
// Anything narrower than the full YMM width suggests the XMM view.
76 _
=> Some(('x'
, "xmm0")),
78 Self::zmm_reg
=> match ty
.size().bits() {
80 256 => Some(('y'
, "ymm0")),
81 _
=> Some(('x'
, "xmm0")),
83 Self::kreg
| Self::kreg0
=> None
,
84 Self::mmx_reg
| Self::x87_reg
=> None
,
85 Self::tmm_reg
=> None
,
// Returns the modifier (and example register) implied when the user writes
// no modifier at all: vector classes default to their own full-width view
// ('x'/'y'/'z'); classes with a single size (reg_byte, kreg, mmx, x87, tmm)
// have no default. NOTE(review): the match header and the body of the
// x86_64 branch for reg/reg_abcd (original lines 90, 93-97) are missing
// from this extraction.
89 pub fn default_modifier(self, arch
: InlineAsmArch
) -> Option
<(char, &'
static str)> {
91 Self::reg
| Self::reg_abcd
=> {
92 if arch
== InlineAsmArch
::X86_64
{
98 Self::reg_byte
=> None
,
99 Self::xmm_reg
=> Some(('x'
, "xmm0")),
100 Self::ymm_reg
=> Some(('y'
, "ymm0")),
101 Self::zmm_reg
=> Some(('z'
, "zmm0")),
102 Self::kreg
| Self::kreg0
=> None
,
103 Self::mmx_reg
| Self::x87_reg
=> None
,
104 Self::tmm_reg
=> None
,
// Lists the value types each register class accepts, each optionally gated
// on a target feature (via the `types!` helper macro: `feature: T, …;`).
// Visible facts: general-purpose regs take I16/I32/F32 (plus I64/F64 on
// x86_64); xmm requires `sse`, ymm requires `avx`, zmm requires `avx512f`,
// each adding progressively wider vector shapes; mmx/x87/tmm registers are
// clobber-only (`&[]`). NOTE(review): the parameter list (original lines
// 109-110), the match header, the kreg type list (original lines 137-139),
// and various closing braces are missing from this extraction.
108 pub fn supported_types(
111 ) -> &'
static [(InlineAsmType
, Option
<Symbol
>)] {
113 Self::reg
| Self::reg_abcd
=> {
114 if arch
== InlineAsmArch
::X86_64
{
115 types
! { _: I16, I32, I64, F32, F64; }
117 types
! { _: I16, I32, F32; }
120 Self::reg_byte
=> types
! { _: I8; }
,
121 Self::xmm_reg
=> types
! {
122 sse
: I32
, I64
, F32
, F64
,
123 VecI8(16), VecI16(8), VecI32(4), VecI64(2), VecF32(4), VecF64(2);
125 Self::ymm_reg
=> types
! {
126 avx
: I32
, I64
, F32
, F64
,
127 VecI8(16), VecI16(8), VecI32(4), VecI64(2), VecF32(4), VecF64(2),
128 VecI8(32), VecI16(16), VecI32(8), VecI64(4), VecF32(8), VecF64(4);
130 Self::zmm_reg
=> types
! {
131 avx512f
: I32
, I64
, F32
, F64
,
132 VecI8(16), VecI16(8), VecI32(4), VecI64(2), VecF32(4), VecF64(2),
133 VecI8(32), VecI16(16), VecI32(8), VecI64(4), VecF32(8), VecF64(4),
134 VecI8(64), VecI16(32), VecI32(16), VecI64(8), VecF32(16), VecF64(8);
136 Self::kreg
=> types
! {
141 Self::mmx_reg
| Self::x87_reg
=> &[],
142 Self::tmm_reg
=> &[],
// Filter function referenced as `% x86_64_only` by the register table below:
// rejects the register on 32-bit x86 with a diagnostic, accepts on x86_64.
// NOTE(review): the `fn` header line with the function name and first
// parameters (original lines ~146-148), the match header, and closing
// braces are missing from this extraction.
149 _reloc_model
: RelocModel
,
150 _target_features
: &FxHashSet
<Symbol
>,
153 ) -> Result
<(), &'
static str> {
155 InlineAsmArch
::X86
=> Err("register is only available on x86_64"),
156 InlineAsmArch
::X86_64
=> Ok(()),
// Filter function referenced as `% high_byte` by ah/bh/ch/dh below: the
// high-byte registers are rejected as operands on x86_64 (visible arm);
// presumably allowed on 32-bit x86 — the x86 arm (original lines ~170) is
// missing from this extraction, along with the `fn` header (original
// lines ~160-162) and closing braces.
163 _reloc_model
: RelocModel
,
164 _target_features
: &FxHashSet
<Symbol
>,
167 ) -> Result
<(), &'
static str> {
169 InlineAsmArch
::X86_64
=> Err("high byte registers cannot be used as an operand on x86_64"),
// Filter function referenced as `% rbx_reserved` by the `bx` register below:
// bx/ebx is usable on 32-bit x86, but rbx is rejected on x86_64 because LLVM
// reserves it internally. NOTE(review): the `fn` header (original
// lines ~173-175), the match header, and closing braces are missing from
// this extraction.
176 _reloc_model
: RelocModel
,
177 _target_features
: &FxHashSet
<Symbol
>,
180 ) -> Result
<(), &'
static str> {
182 InlineAsmArch
::X86
=> Ok(()),
183 InlineAsmArch
::X86_64
=> {
184 Err("rbx is used internally by LLVM and cannot be used as an operand for inline asm")
// Filter function referenced as `% esi_reserved` by the `si` register below:
// the mirror image of rbx_reserved — esi is reserved by LLVM on 32-bit x86
// but rsi is fine on x86_64. NOTE(review): the `fn` header (original
// lines ~189-191), the match header, and closing braces are missing from
// this extraction.
192 _reloc_model
: RelocModel
,
193 _target_features
: &FxHashSet
<Symbol
>,
196 ) -> Result
<(), &'
static str> {
198 InlineAsmArch
::X86
=> {
199 Err("esi is used internally by LLVM and cannot be used as an operand for inline asm")
201 InlineAsmArch
::X86_64
=> Ok(()),
// Register table: a `def_regs!`-style macro invocation declaring every
// concrete X86 inline-asm register. Each entry has the form
//   name: class[, class…] = ["alias", …] [% filter_fn],
// where the aliases are the assembly spellings users may write and the
// optional `% fn` gates availability (x86_64_only, high_byte, rbx_reserved,
// esi_reserved — defined above). The trailing `#error` entries name
// registers that may never be used as operands, with the diagnostic text.
// NOTE(review): the macro name itself (original line ~206) and the entries
// at original lines 304-311 — presumably the k0-k7 mask registers, given the
// kreg/kreg0 classes used above — are missing from this extraction; confirm
// against the full file.
207 X86 X86InlineAsmReg X86InlineAsmRegClass
{
// General-purpose registers (16/32/64-bit aliases).
208 ax
: reg
, reg_abcd
= ["ax", "eax", "rax"],
209 bx
: reg
, reg_abcd
= ["bx", "ebx", "rbx"] % rbx_reserved
,
210 cx
: reg
, reg_abcd
= ["cx", "ecx", "rcx"],
211 dx
: reg
, reg_abcd
= ["dx", "edx", "rdx"],
212 si
: reg
= ["si", "esi", "rsi"] % esi_reserved
,
213 di
: reg
= ["di", "edi", "rdi"],
214 r8
: reg
= ["r8", "r8w", "r8d"] % x86_64_only
,
215 r9
: reg
= ["r9", "r9w", "r9d"] % x86_64_only
,
216 r10
: reg
= ["r10", "r10w", "r10d"] % x86_64_only
,
217 r11
: reg
= ["r11", "r11w", "r11d"] % x86_64_only
,
218 r12
: reg
= ["r12", "r12w", "r12d"] % x86_64_only
,
219 r13
: reg
= ["r13", "r13w", "r13d"] % x86_64_only
,
220 r14
: reg
= ["r14", "r14w", "r14d"] % x86_64_only
,
221 r15
: reg
= ["r15", "r15w", "r15d"] % x86_64_only
,
// 8-bit registers; the high-byte forms (ah/bh/ch/dh) are gated by high_byte.
222 al
: reg_byte
= ["al"],
223 ah
: reg_byte
= ["ah"] % high_byte
,
224 bl
: reg_byte
= ["bl"],
225 bh
: reg_byte
= ["bh"] % high_byte
,
226 cl
: reg_byte
= ["cl"],
227 ch
: reg_byte
= ["ch"] % high_byte
,
228 dl
: reg_byte
= ["dl"],
229 dh
: reg_byte
= ["dh"] % high_byte
,
230 sil
: reg_byte
= ["sil"] % x86_64_only
,
231 dil
: reg_byte
= ["dil"] % x86_64_only
,
232 r8b
: reg_byte
= ["r8b"] % x86_64_only
,
233 r9b
: reg_byte
= ["r9b"] % x86_64_only
,
234 r10b
: reg_byte
= ["r10b"] % x86_64_only
,
235 r11b
: reg_byte
= ["r11b"] % x86_64_only
,
236 r12b
: reg_byte
= ["r12b"] % x86_64_only
,
237 r13b
: reg_byte
= ["r13b"] % x86_64_only
,
238 r14b
: reg_byte
= ["r14b"] % x86_64_only
,
239 r15b
: reg_byte
= ["r15b"] % x86_64_only
,
// SSE registers; xmm8-15 exist only on x86_64.
240 xmm0
: xmm_reg
= ["xmm0"],
241 xmm1
: xmm_reg
= ["xmm1"],
242 xmm2
: xmm_reg
= ["xmm2"],
243 xmm3
: xmm_reg
= ["xmm3"],
244 xmm4
: xmm_reg
= ["xmm4"],
245 xmm5
: xmm_reg
= ["xmm5"],
246 xmm6
: xmm_reg
= ["xmm6"],
247 xmm7
: xmm_reg
= ["xmm7"],
248 xmm8
: xmm_reg
= ["xmm8"] % x86_64_only
,
249 xmm9
: xmm_reg
= ["xmm9"] % x86_64_only
,
250 xmm10
: xmm_reg
= ["xmm10"] % x86_64_only
,
251 xmm11
: xmm_reg
= ["xmm11"] % x86_64_only
,
252 xmm12
: xmm_reg
= ["xmm12"] % x86_64_only
,
253 xmm13
: xmm_reg
= ["xmm13"] % x86_64_only
,
254 xmm14
: xmm_reg
= ["xmm14"] % x86_64_only
,
255 xmm15
: xmm_reg
= ["xmm15"] % x86_64_only
,
// AVX registers; ymm8-15 exist only on x86_64.
256 ymm0
: ymm_reg
= ["ymm0"],
257 ymm1
: ymm_reg
= ["ymm1"],
258 ymm2
: ymm_reg
= ["ymm2"],
259 ymm3
: ymm_reg
= ["ymm3"],
260 ymm4
: ymm_reg
= ["ymm4"],
261 ymm5
: ymm_reg
= ["ymm5"],
262 ymm6
: ymm_reg
= ["ymm6"],
263 ymm7
: ymm_reg
= ["ymm7"],
264 ymm8
: ymm_reg
= ["ymm8"] % x86_64_only
,
265 ymm9
: ymm_reg
= ["ymm9"] % x86_64_only
,
266 ymm10
: ymm_reg
= ["ymm10"] % x86_64_only
,
267 ymm11
: ymm_reg
= ["ymm11"] % x86_64_only
,
268 ymm12
: ymm_reg
= ["ymm12"] % x86_64_only
,
269 ymm13
: ymm_reg
= ["ymm13"] % x86_64_only
,
270 ymm14
: ymm_reg
= ["ymm14"] % x86_64_only
,
271 ymm15
: ymm_reg
= ["ymm15"] % x86_64_only
,
// AVX-512 registers. zmm16-31 also alias their xmm/ymm views directly
// (those views are not separate entries since they require AVX-512 anyway —
// see the comment inside overlapping_regs below).
272 zmm0
: zmm_reg
= ["zmm0"],
273 zmm1
: zmm_reg
= ["zmm1"],
274 zmm2
: zmm_reg
= ["zmm2"],
275 zmm3
: zmm_reg
= ["zmm3"],
276 zmm4
: zmm_reg
= ["zmm4"],
277 zmm5
: zmm_reg
= ["zmm5"],
278 zmm6
: zmm_reg
= ["zmm6"],
279 zmm7
: zmm_reg
= ["zmm7"],
280 zmm8
: zmm_reg
= ["zmm8"] % x86_64_only
,
281 zmm9
: zmm_reg
= ["zmm9"] % x86_64_only
,
282 zmm10
: zmm_reg
= ["zmm10"] % x86_64_only
,
283 zmm11
: zmm_reg
= ["zmm11"] % x86_64_only
,
284 zmm12
: zmm_reg
= ["zmm12"] % x86_64_only
,
285 zmm13
: zmm_reg
= ["zmm13"] % x86_64_only
,
286 zmm14
: zmm_reg
= ["zmm14"] % x86_64_only
,
287 zmm15
: zmm_reg
= ["zmm15"] % x86_64_only
,
288 zmm16
: zmm_reg
= ["zmm16", "xmm16", "ymm16"] % x86_64_only
,
289 zmm17
: zmm_reg
= ["zmm17", "xmm17", "ymm17"] % x86_64_only
,
290 zmm18
: zmm_reg
= ["zmm18", "xmm18", "ymm18"] % x86_64_only
,
291 zmm19
: zmm_reg
= ["zmm19", "xmm19", "ymm19"] % x86_64_only
,
292 zmm20
: zmm_reg
= ["zmm20", "xmm20", "ymm20"] % x86_64_only
,
293 zmm21
: zmm_reg
= ["zmm21", "xmm21", "ymm21"] % x86_64_only
,
294 zmm22
: zmm_reg
= ["zmm22", "xmm22", "ymm22"] % x86_64_only
,
295 zmm23
: zmm_reg
= ["zmm23", "xmm23", "ymm23"] % x86_64_only
,
296 zmm24
: zmm_reg
= ["zmm24", "xmm24", "ymm24"] % x86_64_only
,
297 zmm25
: zmm_reg
= ["zmm25", "xmm25", "ymm25"] % x86_64_only
,
298 zmm26
: zmm_reg
= ["zmm26", "xmm26", "ymm26"] % x86_64_only
,
299 zmm27
: zmm_reg
= ["zmm27", "xmm27", "ymm27"] % x86_64_only
,
300 zmm28
: zmm_reg
= ["zmm28", "xmm28", "ymm28"] % x86_64_only
,
301 zmm29
: zmm_reg
= ["zmm29", "xmm29", "ymm29"] % x86_64_only
,
302 zmm30
: zmm_reg
= ["zmm30", "xmm30", "ymm30"] % x86_64_only
,
303 zmm31
: zmm_reg
= ["zmm31", "xmm31", "ymm31"] % x86_64_only
,
// (Original lines 304-311 missing here — see NOTE at the top of this table.)
// MMX registers (clobber-only per supported_types above).
312 mm0
: mmx_reg
= ["mm0"],
313 mm1
: mmx_reg
= ["mm1"],
314 mm2
: mmx_reg
= ["mm2"],
315 mm3
: mmx_reg
= ["mm3"],
316 mm4
: mmx_reg
= ["mm4"],
317 mm5
: mmx_reg
= ["mm5"],
318 mm6
: mmx_reg
= ["mm6"],
319 mm7
: mmx_reg
= ["mm7"],
// x87 FPU stack registers (clobber-only); st(0) also answers to "st".
320 st0
: x87_reg
= ["st(0)", "st"],
321 st1
: x87_reg
= ["st(1)"],
322 st2
: x87_reg
= ["st(2)"],
323 st3
: x87_reg
= ["st(3)"],
324 st4
: x87_reg
= ["st(4)"],
325 st5
: x87_reg
= ["st(5)"],
326 st6
: x87_reg
= ["st(6)"],
327 st7
: x87_reg
= ["st(7)"],
// AMX tile registers, x86_64-only (clobber-only per supported_types above).
328 tmm0
: tmm_reg
= ["tmm0"] % x86_64_only
,
329 tmm1
: tmm_reg
= ["tmm1"] % x86_64_only
,
330 tmm2
: tmm_reg
= ["tmm2"] % x86_64_only
,
331 tmm3
: tmm_reg
= ["tmm3"] % x86_64_only
,
332 tmm4
: tmm_reg
= ["tmm4"] % x86_64_only
,
333 tmm5
: tmm_reg
= ["tmm5"] % x86_64_only
,
334 tmm6
: tmm_reg
= ["tmm6"] % x86_64_only
,
335 tmm7
: tmm_reg
= ["tmm7"] % x86_64_only
,
// Registers that can never be operands: frame pointer, stack pointer, and
// instruction pointer, each with its fixed diagnostic.
336 #error = ["bp", "bpl", "ebp", "rbp"] =>
337 "the frame pointer cannot be used as an operand for inline asm",
338 #error = ["sp", "spl", "esp", "rsp"] =>
339 "the stack pointer cannot be used as an operand for inline asm",
340 #error = ["ip", "eip", "rip"] =>
341 "the instruction pointer cannot be used as an operand for inline asm",
345 impl X86InlineAsmReg
{
// Writes the concrete assembly spelling of this register into `out`,
// applying the user's template modifier (or the architecture's default:
// 'e' = 32-bit on x86, 'r' = 64-bit on x86_64). The branches partition the
// enum's discriminant ranges: ax..=dx build names from a root letter
// ('a'..'d' indexed by discriminant offset from ax); si..=di and r8..=r15
// derive from self.name() with a size prefix/suffix; byte registers print
// their name verbatim; xmm/ymm/zmm print prefix + index relative to the
// class's 0th register; everything else falls back to self.name().
// NOTE(review): the fn signature line with its name and first parameters
// (original lines ~346-349), the match-arm terminators, intermediate `?`/`;`
// handling, and closing braces are missing from this extraction.
348 out
: &mut dyn fmt
::Write
,
350 modifier
: Option
<char>,
352 let reg_default_modifier
= match arch
{
353 InlineAsmArch
::X86
=> 'e'
,
354 InlineAsmArch
::X86_64
=> 'r'
,
// ax, bx, cx, dx: synthesize e.g. "al"/"ah"/"ax"/"eax"/"rax" from the root.
357 if self as u32 <= Self::dx
as u32 {
358 let root
= ['a'
, 'b'
, 'c'
, 'd'
][self as usize - Self::ax
as usize];
359 match modifier
.unwrap_or(reg_default_modifier
) {
360 'l'
=> write
!(out
, "{}l", root
),
361 'h'
=> write
!(out
, "{}h", root
),
362 'x'
=> write
!(out
, "{}x", root
),
363 'e'
=> write
!(out
, "e{}x", root
),
364 'r'
=> write
!(out
, "r{}x", root
),
// si, di: "sil"/"si"/"esi"/"rsi" style — suffix 'l', or prefix 'e'/'r'.
367 } else if self as u32 <= Self::di
as u32 {
368 let root
= self.name();
369 match modifier
.unwrap_or(reg_default_modifier
) {
370 'l'
=> write
!(out
, "{}l", root
),
371 'x'
=> write
!(out
, "{}", root
),
372 'e'
=> write
!(out
, "e{}", root
),
373 'r'
=> write
!(out
, "r{}", root
),
// r8..r15: numbered GPRs take size *suffixes*: b / w / d / (none).
376 } else if self as u32 <= Self::r15
as u32 {
377 let root
= self.name();
378 match modifier
.unwrap_or(reg_default_modifier
) {
379 'l'
=> write
!(out
, "{}b", root
),
380 'x'
=> write
!(out
, "{}w", root
),
381 'e'
=> write
!(out
, "{}d", root
),
382 'r'
=> out
.write_str(root
),
// Byte registers (al..r15b): name is already the final spelling.
385 } else if self as u32 <= Self::r15b
as u32 {
386 out
.write_str(self.name())
// Vector registers: modifier selects the x/y/z prefix; index is the
// discriminant offset from the class's register 0.
387 } else if self as u32 <= Self::xmm15
as u32 {
388 let prefix
= modifier
.unwrap_or('x'
);
389 let index
= self as u32 - Self::xmm0
as u32;
390 write
!(out
, "{}{}", prefix
, index
)
391 } else if self as u32 <= Self::ymm15
as u32 {
392 let prefix
= modifier
.unwrap_or('y'
);
393 let index
= self as u32 - Self::ymm0
as u32;
394 write
!(out
, "{}{}", prefix
, index
)
395 } else if self as u32 <= Self::zmm31
as u32 {
396 let prefix
= modifier
.unwrap_or('z'
);
397 let index
= self as u32 - Self::zmm0
as u32;
398 write
!(out
, "{}{}", prefix
, index
)
// Fallback for every other register (k*, mm*, st(*), tmm*): plain name.
400 out
.write_str(self.name())
// Invokes `cb` for every register that overlaps (shares storage with) this
// one. A local `reg_conflicts!` macro generates the match: its three rule
// shapes handle a word register with separate low/high byte views
// ($w : $l $h — e.g. ax/al/ah), a word register with a single byte view
// ($w2 : $l2), and the xmm/ymm/zmm triple views of one physical vector
// register ($x : $y : $z). NOTE(review): most of the macro definition and
// the invocation's earlier conflict groups (original lines 406-433,
// 435-450, 459-483) are missing from this extraction; only the tail of the
// xmm/ymm/zmm group (indices 10-15) is visible.
404 pub fn overlapping_regs(self, mut cb
: impl FnMut(X86InlineAsmReg
)) {
405 macro_rules
! reg_conflicts
{
408 $w
:ident
: $l
:ident $h
:ident
411 $w2
:ident
: $l2
:ident
414 $x
:ident
: $y
:ident
: $z
:ident
434 Self::$w2
| Self::$l2
=> {
440 Self::$x
| Self::$y
| Self::$z
=> {
451 // XMM*, YMM* and ZMM* are all different views of the same register.
453 // See section 15.5 of the combined Intel® 64 and IA-32 Architectures
454 // Software Developer’s Manual for more details.
456 // We don't need to specify conflicts for [x,y,z]mm[16-31] since these
457 // registers are only available with AVX-512, so we just specify them
458 // as aliases directly.
484 xmm10
: ymm10
: zmm10
,
485 xmm11
: ymm11
: zmm11
,
486 xmm12
: ymm12
: zmm12
,
487 xmm13
: ymm13
: zmm13
,
488 xmm14
: ymm14
: zmm14
,
489 xmm15
: ymm15
: zmm15
;