// compiler/rustc_ast_lowering/src/asm.rs (rustc 1.66.0)
use crate::{ImplTraitContext, ImplTraitPosition, ParamMode, ResolverAstLoweringExt};

use super::errors::{
    AbiSpecifiedMultipleTimes, AttSyntaxOnlyX86, ClobberAbiNotSupported,
    InlineAsmUnsupportedTarget, InvalidAbiClobberAbi, InvalidAsmTemplateModifierConst,
    InvalidAsmTemplateModifierRegClass, InvalidAsmTemplateModifierRegClassSub,
    InvalidAsmTemplateModifierSym, InvalidRegister, InvalidRegisterClass, RegisterClassOnlyClobber,
    RegisterConflict,
};
use super::LoweringContext;

use rustc_ast::ptr::P;
use rustc_ast::*;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::definitions::DefPathData;
use rustc_session::parse::feature_err;
use rustc_span::{sym, Span};
use rustc_target::asm;
use std::collections::hash_map::Entry;
use std::fmt::Write;

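// Lower an `asm!`/`global_asm!` invocation from the AST to HIR: validate it
// against the target architecture and enabled unstable features, lower each
// operand, then check template modifiers and explicit-register conflicts.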
impl<'a, 'hir> LoweringContext<'a, 'hir> {
    pub(crate) fn lower_inline_asm(
        &mut self,
        sp: Span,
        asm: &InlineAsm,
    ) -> &'hir hir::InlineAsm<'hir> {
        // Rustdoc needs to support asm! from foreign architectures: don't try
        // lowering the register constraints in this case.
        let asm_arch =
            if self.tcx.sess.opts.actually_rustdoc { None } else { self.tcx.sess.asm_arch };
        if asm_arch.is_none() && !self.tcx.sess.opts.actually_rustdoc {
            self.tcx.sess.emit_err(InlineAsmUnsupportedTarget { span: sp });
        }
        if let Some(asm_arch) = asm_arch {
            // Inline assembly is currently only stable for these architectures.
            let is_stable = matches!(
                asm_arch,
                asm::InlineAsmArch::X86
                    | asm::InlineAsmArch::X86_64
                    | asm::InlineAsmArch::Arm
                    | asm::InlineAsmArch::AArch64
                    | asm::InlineAsmArch::RiscV32
                    | asm::InlineAsmArch::RiscV64
            );
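            // Any other architecture (e.g. s390x or msp430) is gated behind
            // the `#![feature(asm_experimental_arch)]` crate attribute.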
            if !is_stable && !self.tcx.features().asm_experimental_arch {
                feature_err(
                    &self.tcx.sess.parse_sess,
                    sym::asm_experimental_arch,
                    sp,
                    "inline assembly is not stable yet on this architecture",
                )
                .emit();
            }
        }
        if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
            && !matches!(asm_arch, Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64))
            && !self.tcx.sess.opts.actually_rustdoc
        {
            self.tcx.sess.emit_err(AttSyntaxOnlyX86 { span: sp });
        }
        if asm.options.contains(InlineAsmOptions::MAY_UNWIND) && !self.tcx.features().asm_unwind {
            feature_err(
                &self.tcx.sess.parse_sess,
                sym::asm_unwind,
                sp,
                "the `may_unwind` option is unstable",
            )
            .emit();
        }

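        // Collect the ABIs named in `clobber_abi(...)` options. For example,
        // `asm!("call {f}", f = sym foo, clobber_abi("C"))` marks every
        // register that the target's C calling convention treats as
        // caller-saved as clobbered. Two different names can resolve to the
        // same ABI (on x86-64 Linux, "C" and "sysv64" are equivalent), which
        // the duplicate check below reports rather than silently merging.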
        let mut clobber_abis = FxHashMap::default();
        if let Some(asm_arch) = asm_arch {
            for (abi_name, abi_span) in &asm.clobber_abis {
                match asm::InlineAsmClobberAbi::parse(asm_arch, &self.tcx.sess.target, *abi_name) {
                    Ok(abi) => {
                        // If the abi was already in the list, emit an error
                        match clobber_abis.get(&abi) {
                            Some((prev_name, prev_sp)) => {
                                // Multiple different abi names may actually be the same ABI
                                // If the specified ABIs are not the same name, alert the user that they resolve to the same ABI
                                let source_map = self.tcx.sess.source_map();
                                let equivalent = (source_map.span_to_snippet(*prev_sp)
                                    != source_map.span_to_snippet(*abi_span))
                                .then_some(());

                                self.tcx.sess.emit_err(AbiSpecifiedMultipleTimes {
                                    abi_span: *abi_span,
                                    prev_name: *prev_name,
                                    prev_span: *prev_sp,
                                    equivalent,
                                });
                            }
                            None => {
                                clobber_abis.insert(abi, (*abi_name, *abi_span));
                            }
                        }
                    }
                    Err(&[]) => {
                        self.tcx.sess.emit_err(ClobberAbiNotSupported { abi_span: *abi_span });
                    }
                    Err(supported_abis) => {
                        let mut abis = format!("`{}`", supported_abis[0]);
                        for m in &supported_abis[1..] {
                            let _ = write!(abis, ", `{}`", m);
                        }
                        self.tcx.sess.emit_err(InvalidAbiClobberAbi {
                            abi_span: *abi_span,
                            supported_abis: abis,
                        });
                    }
                }
            }
        }

        // Lower operands to HIR. We use dummy register classes if an error
        // occurs during lowering because we still need to be able to produce a
        // valid HIR.
        let sess = self.tcx.sess;
        let mut operands: Vec<_> = asm
            .operands
            .iter()
            .map(|(op, op_sp)| {
                let lower_reg = |reg| match reg {
                    InlineAsmRegOrRegClass::Reg(reg) => {
                        asm::InlineAsmRegOrRegClass::Reg(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmReg::parse(asm_arch, reg).unwrap_or_else(|error| {
                                sess.emit_err(InvalidRegister { op_span: *op_sp, reg, error });
                                asm::InlineAsmReg::Err
                            })
                        } else {
                            asm::InlineAsmReg::Err
                        })
                    }
                    InlineAsmRegOrRegClass::RegClass(reg_class) => {
                        asm::InlineAsmRegOrRegClass::RegClass(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmRegClass::parse(asm_arch, reg_class).unwrap_or_else(
                                |error| {
                                    sess.emit_err(InvalidRegisterClass {
                                        op_span: *op_sp,
                                        reg_class,
                                        error,
                                    });
                                    asm::InlineAsmRegClass::Err
                                },
                            )
                        } else {
                            asm::InlineAsmRegClass::Err
                        })
                    }
                };

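                // Lower the operand itself: value operands (`in`, `out`,
                // `inout`, `inout(reg) x => y`) have their expressions
                // lowered here, while `const` and `sym` operands get special
                // handling below.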
                let op = match *op {
                    InlineAsmOperand::In { reg, ref expr } => hir::InlineAsmOperand::In {
                        reg: lower_reg(reg),
                        expr: self.lower_expr(expr),
                    },
                    InlineAsmOperand::Out { reg, late, ref expr } => hir::InlineAsmOperand::Out {
                        reg: lower_reg(reg),
                        late,
                        expr: expr.as_ref().map(|expr| self.lower_expr(expr)),
                    },
                    InlineAsmOperand::InOut { reg, late, ref expr } => {
                        hir::InlineAsmOperand::InOut {
                            reg: lower_reg(reg),
                            late,
                            expr: self.lower_expr(expr),
                        }
                    }
                    InlineAsmOperand::SplitInOut { reg, late, ref in_expr, ref out_expr } => {
                        hir::InlineAsmOperand::SplitInOut {
                            reg: lower_reg(reg),
                            late,
                            in_expr: self.lower_expr(in_expr),
                            out_expr: out_expr.as_ref().map(|expr| self.lower_expr(expr)),
                        }
                    }
                    InlineAsmOperand::Const { ref anon_const } => {
                        if !self.tcx.features().asm_const {
                            feature_err(
                                &sess.parse_sess,
                                sym::asm_const,
                                *op_sp,
                                "const operands for inline assembly are unstable",
                            )
                            .emit();
                        }
                        hir::InlineAsmOperand::Const {
                            anon_const: self.lower_anon_const(anon_const),
                        }
                    }
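                    // A `sym` operand names either a static or a function. A
                    // static lowers to `SymStatic` with its resolved `DefId`;
                    // any other path is wrapped in an `AnonConst` and lowered
                    // to `SymFn`, and later type checking verifies that it is
                    // in fact a function.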
                    InlineAsmOperand::Sym { ref sym } => {
                        let static_def_id = self
                            .resolver
                            .get_partial_res(sym.id)
                            .and_then(|res| res.full_res())
                            .and_then(|res| match res {
                                Res::Def(DefKind::Static(_), def_id) => Some(def_id),
                                _ => None,
                            });

                        if let Some(def_id) = static_def_id {
                            let path = self.lower_qpath(
                                sym.id,
                                &sym.qself,
                                &sym.path,
                                ParamMode::Optional,
                                &ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                            );
                            hir::InlineAsmOperand::SymStatic { path, def_id }
                        } else {
                            // Replace the InlineAsmSym AST node with an
                            // Expr using the name node id.
                            let expr = Expr {
                                id: sym.id,
                                kind: ExprKind::Path(sym.qself.clone(), sym.path.clone()),
                                span: *op_sp,
                                attrs: AttrVec::new(),
                                tokens: None,
                            };

                            // Wrap the expression in an AnonConst.
                            let parent_def_id = self.current_hir_id_owner;
                            let node_id = self.next_node_id();
                            self.create_def(parent_def_id.def_id, node_id, DefPathData::AnonConst);
                            let anon_const = AnonConst { id: node_id, value: P(expr) };
                            hir::InlineAsmOperand::SymFn {
                                anon_const: self.lower_anon_const(&anon_const),
                            }
                        }
                    }
                };
                (op, self.lower_span(*op_sp))
            })
            .collect();
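
        // A template placeholder may carry a modifier, e.g. the `:e` in
        // `asm!("mov {0:e}, 1", out(reg) x)`, which on x86 prints the 32-bit
        // name of whichever register gets allocated. Each register class
        // accepts only a fixed set of such modifiers.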

        // Validate template modifiers against the register classes for the operands
        for p in &asm.template {
            if let InlineAsmTemplatePiece::Placeholder {
                operand_idx,
                modifier: Some(modifier),
                span: placeholder_span,
            } = *p
            {
                let op_sp = asm.operands[operand_idx].1;
                match &operands[operand_idx].0 {
                    hir::InlineAsmOperand::In { reg, .. }
                    | hir::InlineAsmOperand::Out { reg, .. }
                    | hir::InlineAsmOperand::InOut { reg, .. }
                    | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
                        let class = reg.reg_class();
                        if class == asm::InlineAsmRegClass::Err {
                            continue;
                        }
                        let valid_modifiers = class.valid_modifiers(asm_arch.unwrap());
                        if !valid_modifiers.contains(&modifier) {
                            let sub = if !valid_modifiers.is_empty() {
                                let mut mods = format!("`{}`", valid_modifiers[0]);
                                for m in &valid_modifiers[1..] {
                                    let _ = write!(mods, ", `{}`", m);
                                }
                                InvalidAsmTemplateModifierRegClassSub::SupportModifier {
                                    class_name: class.name(),
                                    modifiers: mods,
                                }
                            } else {
                                InvalidAsmTemplateModifierRegClassSub::DoesNotSupportModifier {
                                    class_name: class.name(),
                                }
                            };
                            sess.emit_err(InvalidAsmTemplateModifierRegClass {
                                placeholder_span,
                                op_span: op_sp,
                                sub,
                            });
                        }
                    }
                    hir::InlineAsmOperand::Const { .. } => {
                        sess.emit_err(InvalidAsmTemplateModifierConst {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                    hir::InlineAsmOperand::SymFn { .. }
                    | hir::InlineAsmOperand::SymStatic { .. } => {
                        sess.emit_err(InvalidAsmTemplateModifierSym {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                }
            }
        }

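        // Each explicit register may be used at most once per direction:
        // `asm!("", in("eax") x, in("eax") y)` is rejected, as is an input
        // sharing a register with a non-late output. Overlapping registers
        // such as `al` and `ax` count as conflicts as well.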
        let mut used_input_regs = FxHashMap::default();
        let mut used_output_regs = FxHashMap::default();

        for (idx, &(ref op, op_sp)) in operands.iter().enumerate() {
            if let Some(reg) = op.reg() {
                let reg_class = reg.reg_class();
                if reg_class == asm::InlineAsmRegClass::Err {
                    continue;
                }

                // Some register classes can only be used as clobbers. This
                // means that we disallow passing a value in/out of the asm and
                // require that the operand name an explicit register, not a
                // register class.
                if reg_class.is_clobber_only(asm_arch.unwrap()) && !op.is_clobber() {
                    sess.emit_err(RegisterClassOnlyClobber {
                        op_span: op_sp,
                        reg_class_name: reg_class.name(),
                    });
                    continue;
                }

                // Check for conflicts between explicit register operands.
                if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
                    let (input, output) = match op {
                        hir::InlineAsmOperand::In { .. } => (true, false),

                        // Late outputs do not conflict with inputs, but normal outputs do
                        hir::InlineAsmOperand::Out { late, .. } => (!late, true),

                        hir::InlineAsmOperand::InOut { .. }
                        | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),

                        hir::InlineAsmOperand::Const { .. }
                        | hir::InlineAsmOperand::SymFn { .. }
                        | hir::InlineAsmOperand::SymStatic { .. } => {
                            unreachable!()
                        }
                    };

                    // Flag to output the error only once per operand
                    let mut skip = false;
                    reg.overlapping_regs(|r| {
                        let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
                                         input| {
                            match used_regs.entry(r) {
                                Entry::Occupied(o) => {
                                    if skip {
                                        return;
                                    }
                                    skip = true;

                                    let idx2 = *o.get();
                                    let &(ref op2, op_sp2) = &operands[idx2];
                                    let Some(asm::InlineAsmRegOrRegClass::Reg(reg2)) = op2.reg() else {
                                        unreachable!();
                                    };

                                    let in_out = match (op, op2) {
                                        (
                                            hir::InlineAsmOperand::In { .. },
                                            hir::InlineAsmOperand::Out { late, .. },
                                        )
                                        | (
                                            hir::InlineAsmOperand::Out { late, .. },
                                            hir::InlineAsmOperand::In { .. },
                                        ) => {
                                            assert!(!*late);
                                            let out_op_sp = if input { op_sp2 } else { op_sp };
                                            Some(out_op_sp)
                                        },
                                        _ => None,
                                    };

                                    sess.emit_err(RegisterConflict {
                                        op_span1: op_sp,
                                        op_span2: op_sp2,
                                        reg1_name: reg.name(),
                                        reg2_name: reg2.name(),
                                        in_out
                                    });
                                }
                                Entry::Vacant(v) => {
                                    if r == reg {
                                        v.insert(idx);
                                    }
                                }
                            }
                        };
                        if input {
                            check(&mut used_input_regs, true);
                        }
                        if output {
                            check(&mut used_output_regs, false);
                        }
                    });
                }
            }
        }

        // If a clobber_abi is specified, add the necessary clobbers to the
        // operands list.
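        // E.g. `clobber_abi("C")` behaves as if a `lateout(...) _` (an `Out`
        // with `expr: None`) had been written for every caller-saved register
        // of the target's C ABI that the asm does not already output to.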
        let mut clobbered = FxHashSet::default();
        for (abi, (_, abi_span)) in clobber_abis {
            for &clobber in abi.clobbered_regs() {
                // Don't emit a clobber for a register already clobbered
                if clobbered.contains(&clobber) {
                    continue;
                }

                let mut output_used = false;
                clobber.overlapping_regs(|reg| {
                    if used_output_regs.contains_key(&reg) {
                        output_used = true;
                    }
                });

                if !output_used {
                    operands.push((
                        hir::InlineAsmOperand::Out {
                            reg: asm::InlineAsmRegOrRegClass::Reg(clobber),
                            late: true,
                            expr: None,
                        },
                        self.lower_span(abi_span),
                    ));
                    clobbered.insert(clobber);
                }
            }
        }

        let operands = self.arena.alloc_from_iter(operands);
        let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
        let template_strs = self.arena.alloc_from_iter(
            asm.template_strs
                .iter()
                .map(|(sym, snippet, span)| (*sym, *snippet, self.lower_span(*span))),
        );
        let line_spans =
            self.arena.alloc_from_iter(asm.line_spans.iter().map(|span| self.lower_span(*span)));
        let hir_asm =
            hir::InlineAsm { template, template_strs, operands, options: asm.options, line_spans };
        self.arena.alloc(hir_asm)
    }
}