1 //! Codegen of [`asm!`] invocations.
7 use rustc_ast
::ast
::{InlineAsmOptions, InlineAsmTemplatePiece}
;
8 use rustc_middle
::mir
::InlineAsmOperand
;
9 use rustc_target
::asm
::*;
// Lowers a MIR `asm!` invocation: every register operand is assigned an
// aligned offset inside one shared scratch area, a standalone global-asm
// wrapper function is generated for the template, and a call to that
// wrapper (with the scratch area's address) is emitted in place of the
// inline asm.
// NOTE(review): this source view is elided (the embedded original line
// numbers skip); comments describe only what the visible lines show.
11 pub(crate) fn codegen_inline_asm
<'tcx
>(
12 fx
: &mut FunctionCx
<'_
, '_
, 'tcx
>,
14 template
: &[InlineAsmTemplatePiece
],
15 operands
: &[InlineAsmOperand
<'tcx
>],
16 options
: InlineAsmOptions
,
18 // FIXME add .eh_frame unwind info directives
20 if template
.is_empty() {
// Running layout state for the scratch area: `slot_size` is its cumulative
// size; `clobbered_regs`/`inputs`/`outputs` record (register, offset[, value
// or place]) tuples consumed by generate_asm_wrapper / call_inline_asm.
25 let mut slot_size
= Size
::from_bytes(0);
26 let mut clobbered_regs
= Vec
::new();
27 let mut inputs
= Vec
::new();
28 let mut outputs
= Vec
::new();
// Allocates the next slot for `reg_class`: derives the register size from
// its supported types (hard-coded to x86_64 here), aligns `slot_size` to
// that size, and returns the slot's offset.
30 let mut new_slot
= |reg_class
: InlineAsmRegClass
| {
31 let reg_size
= reg_class
32 .supported_types(InlineAsmArch
::X86_64
)
34 .map(|(ty
, _
)| ty
.size())
37 let align
= rustc_target
::abi
::Align
::from_bytes(reg_size
.bytes()).unwrap();
38 slot_size
= slot_size
.align_to(align
);
39 let offset
= slot_size
;
40 slot_size
+= reg_size
;
44 // FIXME overlap input and output slots to save stack space
// Classify operands. Every In/Out/InOut register is also recorded as
// clobbered (with its own slot) so the wrapper can save/restore it.
// NOTE(review): each operand appears to get a fresh slot per new_slot()
// call (clobber slot and value slot are separate) — elided lines hide
// the enclosing push expressions, so confirm against the full file.
45 for operand
in operands
{
47 InlineAsmOperand
::In { reg, ref value }
=> {
48 let reg
= expect_reg(reg
);
49 clobbered_regs
.push((reg
, new_slot(reg
.reg_class())));
// Input: codegen the operand now and remember the scalar to store later.
52 new_slot(reg
.reg_class()),
53 crate::base
::codegen_operand(fx
, value
).load_scalar(fx
),
56 InlineAsmOperand
::Out { reg, late: _, place }
=> {
57 let reg
= expect_reg(reg
);
58 clobbered_regs
.push((reg
, new_slot(reg
.reg_class())));
// Output with a destination place: remember where to write the result.
59 if let Some(place
) = place
{
62 new_slot(reg
.reg_class()),
63 crate::base
::codegen_place(fx
, place
),
67 InlineAsmOperand
::InOut { reg, late: _, ref in_value, out_place }
=> {
68 let reg
= expect_reg(reg
);
69 clobbered_regs
.push((reg
, new_slot(reg
.reg_class())));
// InOut: recorded both as an input (value) and, if present, an output.
72 new_slot(reg
.reg_class()),
73 crate::base
::codegen_operand(fx
, in_value
).load_scalar(fx
),
75 if let Some(out_place
) = out_place
{
78 new_slot(reg
.reg_class()),
79 crate::base
::codegen_place(fx
, out_place
),
// Const / sym operands are not implemented yet.
83 InlineAsmOperand
::Const { value: _ }
=> todo
!(),
84 InlineAsmOperand
::SymFn { value: _ }
=> todo
!(),
85 InlineAsmOperand
::SymStatic { def_id: _ }
=> todo
!(),
// Give each wrapper a unique symbol name: enclosing function's mangled
// name plus a per-function counter.
89 let inline_asm_index
= fx
.inline_asm_index
;
90 fx
.inline_asm_index
+= 1;
92 format
!("{}__inline_asm_{}", fx
.tcx
.symbol_name(fx
.instance
).name
, inline_asm_index
);
94 let generated_asm
= generate_asm_wrapper(
96 InlineAsmArch
::X86_64
,
// Append the wrapper's text to the module-level global asm buffer, then
// emit the actual call that marshals values through the scratch slot.
103 fx
.cx
.global_asm
.push_str(&generated_asm
);
105 call_inline_asm(fx
, &asm_name
, slot_size
, inputs
, outputs
);
// Renders the textual global-asm wrapper for one `asm!` invocation:
// symbol directives, a prologue that captures the scratch-area pointer,
// clobber saves, input loads, the user template, output stores, clobber
// restores, and an epilogue (`ret`, or `ud2` for NORETURN asm).
// NOTE(review): this view is elided — only some parameters of the
// signature are visible here; see the call site in codegen_inline_asm.
108 fn generate_asm_wrapper(
111 options
: InlineAsmOptions
,
112 template
: &[InlineAsmTemplatePiece
],
113 clobbered_regs
: Vec
<(InlineAsmReg
, Size
)>,
114 inputs
: &[(InlineAsmReg
, Size
, Value
)],
115 outputs
: &[(InlineAsmReg
, Size
, CPlace
<'_
>)],
117 let mut generated_asm
= String
::new();
// ELF symbol boilerplate: export the wrapper, mark it a function, and put
// it in its own .text.<name> section.
118 writeln
!(generated_asm
, ".globl {}", asm_name
).unwrap();
119 writeln
!(generated_asm
, ".type {},@function", asm_name
).unwrap();
120 writeln
!(generated_asm
, ".section .text.{},\"ax\",@progbits", asm_name
).unwrap();
121 writeln
!(generated_asm
, "{}:", asm_name
).unwrap();
// Prologue: the wrapper is emitted in Intel syntax; rbp is repurposed as
// the base pointer into the scratch area, whose address arrives in rdi
// (first pointer argument — call_inline_asm calls this with
// CallConv::SystemV and a single pointer param).
123 generated_asm
.push_str(".intel_syntax noprefix\n");
124 generated_asm
.push_str(" push rbp\n");
125 generated_asm
.push_str(" mov rbp,rdi\n");
127 // Save clobbered registers
// Clobbers are only saved when the asm can return — for NORETURN there is
// nothing to restore to.
128 if !options
.contains(InlineAsmOptions
::NORETURN
) {
129 // FIXME skip registers saved by the calling convention
130 for &(reg
, offset
) in &clobbered_regs
{
131 save_register(&mut generated_asm
, arch
, reg
, offset
);
135 // Write input registers
// "restore" here means slot -> register (`mov reg, [rbp+off]`), i.e. it
// loads each input value into its register before the user asm runs.
136 for &(reg
, offset
, _value
) in inputs
{
137 restore_register(&mut generated_asm
, arch
, reg
, offset
);
// Switch to AT&T syntax around the user template if requested.
140 if options
.contains(InlineAsmOptions
::ATT_SYNTAX
) {
141 generated_asm
.push_str(".att_syntax\n");
144 // The actual inline asm
145 for piece
in template
{
147 InlineAsmTemplatePiece
::String(s
) => {
148 generated_asm
.push_str(s
);
// Operand placeholders ({0}, {name:x}, …) are not implemented yet.
150 InlineAsmTemplatePiece
::Placeholder { operand_idx: _, modifier: _, span: _ }
=> todo
!(),
153 generated_asm
.push('
\n'
);
// Back to the wrapper's default Intel syntax after the user template.
155 if options
.contains(InlineAsmOptions
::ATT_SYNTAX
) {
156 generated_asm
.push_str(".intel_syntax noprefix\n");
// Epilogue only when the asm can return: spill outputs, undo clobber
// saves in reverse order, and return. Otherwise trap with ud2.
159 if !options
.contains(InlineAsmOptions
::NORETURN
) {
160 // Read output registers
// "save" here means register -> slot (`mov [rbp+off], reg`), i.e. it
// stores each output register into the scratch area for the caller.
161 for &(reg
, offset
, _place
) in outputs
{
162 save_register(&mut generated_asm
, arch
, reg
, offset
);
165 // Restore clobbered registers
166 for &(reg
, offset
) in clobbered_regs
.iter().rev() {
167 restore_register(&mut generated_asm
, arch
, reg
, offset
);
170 generated_asm
.push_str(" pop rbp\n");
171 generated_asm
.push_str(" ret\n");
173 generated_asm
.push_str(" ud2\n");
// Trailer: leave the assembler in AT&T syntax (the surrounding global asm
// default), record the symbol size, and return to the plain .text section.
176 generated_asm
.push_str(".att_syntax\n");
177 writeln
!(generated_asm
, ".size {name}, .-{name}", name
= asm_name
).unwrap();
178 generated_asm
.push_str(".text\n");
179 generated_asm
.push_str("\n\n");
// Emits the clif IR that drives one generated wrapper: allocate an
// explicit stack slot of `slot_size` bytes, store each input value at its
// recorded offset, call the wrapper with the slot's address as its single
// pointer argument (SystemV call conv), then load each output back out of
// the slot and write it to its destination place.
// NOTE(review): elided view — the asm_name/slot_size parameters and the
// func-id declaration lines are partially hidden here.
184 fn call_inline_asm
<'tcx
>(
185 fx
: &mut FunctionCx
<'_
, '_
, 'tcx
>,
188 inputs
: Vec
<(InlineAsmReg
, Size
, Value
)>,
189 outputs
: Vec
<(InlineAsmReg
, Size
, CPlace
<'tcx
>)>,
// Scratch area shared with the wrapper; size must fit in u32 for
// Cranelift's StackSlotData (unwrap = internal invariant, not user error).
191 let stack_slot
= fx
.bcx
.func
.create_stack_slot(StackSlotData
{
192 kind
: StackSlotKind
::ExplicitSlot
,
194 size
: u32::try_from(slot_size
.bytes()).unwrap(),
196 #[cfg(debug_assertions)]
197 fx
.add_comment(stack_slot
, "inline asm scratch slot");
// Declare the wrapper's signature: SystemV, one pointer parameter (the
// scratch slot address), no results.
199 let inline_asm_func
= fx
206 call_conv
: CallConv
::SystemV
,
207 params
: vec
![AbiParam
::new(fx
.pointer_type
)],
212 let inline_asm_func
= fx
.cx
.module
.declare_func_in_func(inline_asm_func
, &mut fx
.bcx
.func
);
213 #[cfg(debug_assertions)]
214 fx
.add_comment(inline_asm_func
, asm_name
);
// Marshal inputs into the slot at the offsets chosen by new_slot().
216 for (_reg
, offset
, value
) in inputs
{
217 fx
.bcx
.ins().stack_store(value
, stack_slot
, i32::try_from(offset
.bytes()).unwrap());
220 let stack_slot_addr
= fx
.bcx
.ins().stack_addr(fx
.pointer_type
, stack_slot
, 0);
221 fx
.bcx
.ins().call(inline_asm_func
, &[stack_slot_addr
]);
// Marshal outputs back: load with the place's clif type, then store to
// the destination CPlace.
223 for (_reg
, offset
, place
) in outputs
{
224 let ty
= fx
.clif_type(place
.layout().ty
).unwrap();
225 let value
= fx
.bcx
.ins().stack_load(ty
, stack_slot
, i32::try_from(offset
.bytes()).unwrap());
226 place
.write_cvalue(fx
, CValue
::by_val(value
, place
.layout()));
// Unwraps an explicit register from a reg-or-class operand constraint.
// Register *classes* (letting the allocator pick a register) are not
// supported by this backend yet, hence unimplemented!.
230 fn expect_reg(reg_or_class
: InlineAsmRegOrRegClass
) -> InlineAsmReg
{
232 InlineAsmRegOrRegClass
::Reg(reg
) => reg
,
233 InlineAsmRegOrRegClass
::RegClass(class
) => unimplemented
!("{:?}", class
),
// Appends `mov [rbp+0x<offset>], <reg>` (register -> scratch slot) in
// Intel syntax. Only x86_64 is implemented; `reg.emit` renders the
// register name with no modifier.
237 fn save_register(generated_asm
: &mut String
, arch
: InlineAsmArch
, reg
: InlineAsmReg
, offset
: Size
) {
239 InlineAsmArch
::X86_64
=> {
240 write
!(generated_asm
, " mov [rbp+0x{:x}], ", offset
.bytes()).unwrap();
241 reg
.emit(generated_asm
, InlineAsmArch
::X86_64
, None
).unwrap();
242 generated_asm
.push('
\n'
);
244 _
=> unimplemented
!("save_register for {:?}", arch
),
// (restore_register, continued — the `fn restore_register(` header line
// is elided from this view; the unimplemented! message below confirms the
// name.) Appends `mov <reg>, [rbp+0x<offset>]` (scratch slot -> register)
// in Intel syntax; the mirror of save_register. x86_64 only.
249 generated_asm
: &mut String
,
255 InlineAsmArch
::X86_64
=> {
256 generated_asm
.push_str(" mov ");
257 reg
.emit(generated_asm
, InlineAsmArch
::X86_64
, None
).unwrap();
258 writeln
!(generated_asm
, ", [rbp+0x{:x}]", offset
.bytes()).unwrap();
260 _
=> unimplemented
!("restore_register for {:?}", arch
),