// compiler/rustc_ast_lowering/src/asm.rs (rustc 1.63.0+dfsg1)
use crate::{ImplTraitContext, ImplTraitPosition, ParamMode, ResolverAstLoweringExt};

use super::LoweringContext;

use rustc_ast::ptr::P;
use rustc_ast::*;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_set::FxHashSet;
use rustc_errors::struct_span_err;
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::definitions::DefPathData;
use rustc_session::parse::feature_err;
use rustc_span::{sym, Span};
use rustc_target::asm;
use std::collections::hash_map::Entry;
use std::fmt::Write;

impl<'a, 'hir> LoweringContext<'a, 'hir> {
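    /// Lower an AST `InlineAsm` into an arena-allocated `hir::InlineAsm`,
    /// checking target support, options, operands and register constraints
    /// along the way and expanding `clobber_abi` into explicit clobber
    /// operands. Errors are reported, but lowering still produces valid HIR.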
    pub(crate) fn lower_inline_asm(
        &mut self,
        sp: Span,
        asm: &InlineAsm,
    ) -> &'hir hir::InlineAsm<'hir> {
        // Rustdoc needs to support asm! from foreign architectures: don't try
        // lowering the register constraints in this case.
        let asm_arch = if self.sess.opts.actually_rustdoc { None } else { self.sess.asm_arch };
        if asm_arch.is_none() && !self.sess.opts.actually_rustdoc {
            struct_span_err!(self.sess, sp, E0472, "inline assembly is unsupported on this target")
                .emit();
        }
        if let Some(asm_arch) = asm_arch {
            // Inline assembly is currently only stable for these architectures.
            let is_stable = matches!(
                asm_arch,
                asm::InlineAsmArch::X86
                    | asm::InlineAsmArch::X86_64
                    | asm::InlineAsmArch::Arm
                    | asm::InlineAsmArch::AArch64
                    | asm::InlineAsmArch::RiscV32
                    | asm::InlineAsmArch::RiscV64
            );
            if !is_stable && !self.sess.features_untracked().asm_experimental_arch {
                feature_err(
                    &self.sess.parse_sess,
                    sym::asm_experimental_arch,
                    sp,
                    "inline assembly is not stable yet on this architecture",
                )
                .emit();
            }
        }
        if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
            && !matches!(asm_arch, Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64))
            && !self.sess.opts.actually_rustdoc
        {
            self.sess
                .struct_span_err(sp, "the `att_syntax` option is only supported on x86")
                .emit();
        }
        if asm.options.contains(InlineAsmOptions::MAY_UNWIND)
            && !self.sess.features_untracked().asm_unwind
        {
            feature_err(
                &self.sess.parse_sess,
                sym::asm_unwind,
                sp,
                "the `may_unwind` option is unstable",
            )
            .emit();
        }
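
        // For example, `options(att_syntax)` is rejected by the check above on
        // any target other than x86/x86_64, and `options(may_unwind)` is only
        // accepted once the `asm_unwind` feature gate is enabled.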

        let mut clobber_abis = FxHashMap::default();
        if let Some(asm_arch) = asm_arch {
            for (abi_name, abi_span) in &asm.clobber_abis {
                match asm::InlineAsmClobberAbi::parse(asm_arch, &self.sess.target, *abi_name) {
                    Ok(abi) => {
                        // If the ABI was already in the list, emit an error.
                        match clobber_abis.get(&abi) {
                            Some((prev_name, prev_sp)) => {
                                let mut err = self.sess.struct_span_err(
                                    *abi_span,
                                    &format!("`{}` ABI specified multiple times", prev_name),
                                );
                                err.span_label(*prev_sp, "previously specified here");

                                // Multiple different ABI names may actually be the same ABI.
                                // If the two are spelled differently, tell the user that they
                                // resolve to the same ABI on the current target.
                                let source_map = self.sess.source_map();
                                if source_map.span_to_snippet(*prev_sp)
                                    != source_map.span_to_snippet(*abi_span)
                                {
                                    err.note("these ABIs are equivalent on the current target");
                                }

                                err.emit();
                            }
                            None => {
                                clobber_abis.insert(abi, (abi_name, *abi_span));
                            }
                        }
                    }
                    Err(&[]) => {
                        self.sess
                            .struct_span_err(
                                *abi_span,
                                "`clobber_abi` is not supported on this target",
                            )
                            .emit();
                    }
                    Err(supported_abis) => {
                        let mut err =
                            self.sess.struct_span_err(*abi_span, "invalid ABI for `clobber_abi`");
                        let mut abis = format!("`{}`", supported_abis[0]);
                        for m in &supported_abis[1..] {
                            let _ = write!(abis, ", `{}`", m);
                        }
                        err.note(&format!(
                            "the following ABIs are supported on this target: {}",
                            abis
                        ));
                        err.emit();
                    }
                }
            }
        }
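
        // For example, `asm!("...", clobber_abi("C"))` records the "C" ABI here.
        // Writing `clobber_abi("C")` twice triggers the duplicate-ABI error above,
        // and so (typically) does combining "C" with "system", since on most
        // targets both names resolve to the same ABI.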

        // Lower operands to HIR. We use dummy register classes if an error
        // occurs during lowering because we still need to be able to produce a
        // valid HIR.
        let sess = self.sess;
        let mut operands: Vec<_> = asm
            .operands
            .iter()
            .map(|(op, op_sp)| {
                let lower_reg = |reg| match reg {
                    InlineAsmRegOrRegClass::Reg(s) => {
                        asm::InlineAsmRegOrRegClass::Reg(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmReg::parse(asm_arch, s).unwrap_or_else(|e| {
                                let msg = format!("invalid register `{}`: {}", s.as_str(), e);
                                sess.struct_span_err(*op_sp, &msg).emit();
                                asm::InlineAsmReg::Err
                            })
                        } else {
                            asm::InlineAsmReg::Err
                        })
                    }
                    InlineAsmRegOrRegClass::RegClass(s) => {
                        asm::InlineAsmRegOrRegClass::RegClass(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmRegClass::parse(asm_arch, s).unwrap_or_else(|e| {
                                let msg = format!("invalid register class `{}`: {}", s.as_str(), e);
                                sess.struct_span_err(*op_sp, &msg).emit();
                                asm::InlineAsmRegClass::Err
                            })
                        } else {
                            asm::InlineAsmRegClass::Err
                        })
                    }
                };

                let op = match *op {
                    InlineAsmOperand::In { reg, ref expr } => hir::InlineAsmOperand::In {
                        reg: lower_reg(reg),
                        expr: self.lower_expr_mut(expr),
                    },
                    InlineAsmOperand::Out { reg, late, ref expr } => hir::InlineAsmOperand::Out {
                        reg: lower_reg(reg),
                        late,
                        expr: expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
                    },
                    InlineAsmOperand::InOut { reg, late, ref expr } => {
                        hir::InlineAsmOperand::InOut {
                            reg: lower_reg(reg),
                            late,
                            expr: self.lower_expr_mut(expr),
                        }
                    }
                    InlineAsmOperand::SplitInOut { reg, late, ref in_expr, ref out_expr } => {
                        hir::InlineAsmOperand::SplitInOut {
                            reg: lower_reg(reg),
                            late,
                            in_expr: self.lower_expr_mut(in_expr),
                            out_expr: out_expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
                        }
                    }
                    InlineAsmOperand::Const { ref anon_const } => {
                        if !self.sess.features_untracked().asm_const {
                            feature_err(
                                &self.sess.parse_sess,
                                sym::asm_const,
                                *op_sp,
                                "const operands for inline assembly are unstable",
                            )
                            .emit();
                        }
                        hir::InlineAsmOperand::Const {
                            anon_const: self.lower_anon_const(anon_const),
                        }
                    }
                    InlineAsmOperand::Sym { ref sym } => {
                        if !self.sess.features_untracked().asm_sym {
                            feature_err(
                                &self.sess.parse_sess,
                                sym::asm_sym,
                                *op_sp,
                                "sym operands for inline assembly are unstable",
                            )
                            .emit();
                        }

                        let static_def_id = self
                            .resolver
                            .get_partial_res(sym.id)
                            .filter(|res| res.unresolved_segments() == 0)
                            .and_then(|res| {
                                if let Res::Def(DefKind::Static(_), def_id) = res.base_res() {
                                    Some(def_id)
                                } else {
                                    None
                                }
                            });

                        if let Some(def_id) = static_def_id {
                            let path = self.lower_qpath(
                                sym.id,
                                &sym.qself,
                                &sym.path,
                                ParamMode::Optional,
                                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                            );
                            hir::InlineAsmOperand::SymStatic { path, def_id }
                        } else {
                            // Replace the InlineAsmSym AST node with an
                            // Expr using the name node id.
                            let expr = Expr {
                                id: sym.id,
                                kind: ExprKind::Path(sym.qself.clone(), sym.path.clone()),
                                span: *op_sp,
                                attrs: AttrVec::new(),
                                tokens: None,
                            };

                            // Wrap the expression in an AnonConst.
                            let parent_def_id = self.current_hir_id_owner;
                            let node_id = self.next_node_id();
                            self.create_def(parent_def_id, node_id, DefPathData::AnonConst);
                            let anon_const = AnonConst { id: node_id, value: P(expr) };
                            hir::InlineAsmOperand::SymFn {
                                anon_const: self.lower_anon_const(&anon_const),
                            }
                        }
                    }
                };
                (op, self.lower_span(*op_sp))
            })
            .collect();
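
        // At this point every AST operand has an HIR counterpart: for example,
        // `asm!("mov {0}, {1}", out(reg) x, in(reg) y, const N, sym func)`
        // yields an `Out`, an `In`, a `Const` and a `SymFn` (or `SymStatic`,
        // if the `sym` path resolves to a static) operand.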

        // Validate template modifiers against the register classes for the operands
        for p in &asm.template {
            if let InlineAsmTemplatePiece::Placeholder {
                operand_idx,
                modifier: Some(modifier),
                span: placeholder_span,
            } = *p
            {
                let op_sp = asm.operands[operand_idx].1;
                match &operands[operand_idx].0 {
                    hir::InlineAsmOperand::In { reg, .. }
                    | hir::InlineAsmOperand::Out { reg, .. }
                    | hir::InlineAsmOperand::InOut { reg, .. }
                    | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
                        let class = reg.reg_class();
                        if class == asm::InlineAsmRegClass::Err {
                            continue;
                        }
                        let valid_modifiers = class.valid_modifiers(asm_arch.unwrap());
                        if !valid_modifiers.contains(&modifier) {
                            let mut err = sess.struct_span_err(
                                placeholder_span,
                                "invalid asm template modifier for this register class",
                            );
                            err.span_label(placeholder_span, "template modifier");
                            err.span_label(op_sp, "argument");
                            if !valid_modifiers.is_empty() {
                                let mut mods = format!("`{}`", valid_modifiers[0]);
                                for m in &valid_modifiers[1..] {
                                    let _ = write!(mods, ", `{}`", m);
                                }
                                err.note(&format!(
                                    "the `{}` register class supports \
                                     the following template modifiers: {}",
                                    class.name(),
                                    mods
                                ));
                            } else {
                                err.note(&format!(
                                    "the `{}` register class does not support template modifiers",
                                    class.name()
                                ));
                            }
                            err.emit();
                        }
                    }
                    hir::InlineAsmOperand::Const { .. } => {
                        let mut err = sess.struct_span_err(
                            placeholder_span,
                            "asm template modifiers are not allowed for `const` arguments",
                        );
                        err.span_label(placeholder_span, "template modifier");
                        err.span_label(op_sp, "argument");
                        err.emit();
                    }
                    hir::InlineAsmOperand::SymFn { .. }
                    | hir::InlineAsmOperand::SymStatic { .. } => {
                        let mut err = sess.struct_span_err(
                            placeholder_span,
                            "asm template modifiers are not allowed for `sym` arguments",
                        );
                        err.span_label(placeholder_span, "template modifier");
                        err.span_label(op_sp, "argument");
                        err.emit();
                    }
                }
            }
        }
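
        // For example, on x86-64 `asm!("mov {0:e}, 1", out(reg) x)` passes this
        // check because `e` is a valid modifier for the `reg` class, while any
        // modifier on a `const` or `sym` operand is rejected above.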

        let mut used_input_regs = FxHashMap::default();
        let mut used_output_regs = FxHashMap::default();

        for (idx, &(ref op, op_sp)) in operands.iter().enumerate() {
            if let Some(reg) = op.reg() {
                let reg_class = reg.reg_class();
                if reg_class == asm::InlineAsmRegClass::Err {
                    continue;
                }

                // Some register classes can only be used as clobbers. This
                // means that we disallow passing a value in/out of the asm and
                // require that the operand name an explicit register, not a
                // register class.
                if reg_class.is_clobber_only(asm_arch.unwrap()) && !op.is_clobber() {
                    let msg = format!(
                        "register class `{}` can only be used as a clobber, \
                         not as an input or output",
                        reg_class.name()
                    );
                    sess.struct_span_err(op_sp, &msg).emit();
                    continue;
                }

                // Check for conflicts between explicit register operands.
                if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
                    let (input, output) = match op {
                        hir::InlineAsmOperand::In { .. } => (true, false),

                        // Late outputs do not conflict with inputs, but normal outputs do
                        hir::InlineAsmOperand::Out { late, .. } => (!late, true),

                        hir::InlineAsmOperand::InOut { .. }
                        | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),

                        hir::InlineAsmOperand::Const { .. }
                        | hir::InlineAsmOperand::SymFn { .. }
                        | hir::InlineAsmOperand::SymStatic { .. } => {
                            unreachable!()
                        }
                    };

                    // Flag to output the error only once per operand
                    let mut skip = false;
                    reg.overlapping_regs(|r| {
                        let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
                                         input| {
                            match used_regs.entry(r) {
                                Entry::Occupied(o) => {
                                    if skip {
                                        return;
                                    }
                                    skip = true;

                                    let idx2 = *o.get();
                                    let &(ref op2, op_sp2) = &operands[idx2];
                                    let Some(asm::InlineAsmRegOrRegClass::Reg(reg2)) = op2.reg() else {
                                        unreachable!();
                                    };

                                    let msg = format!(
                                        "register `{}` conflicts with register `{}`",
                                        reg.name(),
                                        reg2.name()
                                    );
                                    let mut err = sess.struct_span_err(op_sp, &msg);
                                    err.span_label(op_sp, &format!("register `{}`", reg.name()));
                                    err.span_label(op_sp2, &format!("register `{}`", reg2.name()));

                                    match (op, op2) {
                                        (
                                            hir::InlineAsmOperand::In { .. },
                                            hir::InlineAsmOperand::Out { late, .. },
                                        )
                                        | (
                                            hir::InlineAsmOperand::Out { late, .. },
                                            hir::InlineAsmOperand::In { .. },
                                        ) => {
                                            assert!(!*late);
                                            let out_op_sp = if input { op_sp2 } else { op_sp };
                                            let msg = "use `lateout` instead of \
                                                       `out` to avoid conflict";
                                            err.span_help(out_op_sp, msg);
                                        }
                                        _ => {}
                                    }

                                    err.emit();
                                }
                                Entry::Vacant(v) => {
                                    if r == reg {
                                        v.insert(idx);
                                    }
                                }
                            }
                        };
                        if input {
                            check(&mut used_input_regs, true);
                        }
                        if output {
                            check(&mut used_output_regs, false);
                        }
                    });
                }
            }
        }
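
        // For example, `in("eax") a, out("eax") b` is reported as a conflict by
        // the loop above, whereas `in("eax") a, lateout("eax") b` is accepted:
        // a late output is only recorded against other outputs, not inputs.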

        // If a clobber_abi is specified, add the necessary clobbers to the
        // operands list.
        let mut clobbered = FxHashSet::default();
        for (abi, (_, abi_span)) in clobber_abis {
            for &clobber in abi.clobbered_regs() {
                // Don't emit a clobber for a register already clobbered
                if clobbered.contains(&clobber) {
                    continue;
                }

                let mut output_used = false;
                clobber.overlapping_regs(|reg| {
                    if used_output_regs.contains_key(&reg) {
                        output_used = true;
                    }
                });

                if !output_used {
                    operands.push((
                        hir::InlineAsmOperand::Out {
                            reg: asm::InlineAsmRegOrRegClass::Reg(clobber),
                            late: true,
                            expr: None,
                        },
                        self.lower_span(abi_span),
                    ));
                    clobbered.insert(clobber);
                }
            }
        }
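
        // For example, `clobber_abi("C")` on x86-64 appends a `lateout(...) _`
        // style clobber operand for each caller-saved register of the C ABI
        // that the user has not already used as an output.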

        let operands = self.arena.alloc_from_iter(operands);
        let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
        let template_strs = self.arena.alloc_from_iter(
            asm.template_strs
                .iter()
                .map(|(sym, snippet, span)| (*sym, *snippet, self.lower_span(*span))),
        );
        let line_spans =
            self.arena.alloc_from_iter(asm.line_spans.iter().map(|span| self.lower_span(*span)));
        let hir_asm =
            hir::InlineAsm { template, template_strs, operands, options: asm.options, line_spans };
        self.arena.alloc(hir_asm)
    }
}