]>
Commit | Line | Data |
---|---|---|
923072b8 | 1 | use crate::{ImplTraitContext, ImplTraitPosition, ParamMode, ResolverAstLoweringExt}; |
04454e1e | 2 | |
f2b60f7d FG |
3 | use super::errors::{ |
4 | AbiSpecifiedMultipleTimes, AttSyntaxOnlyX86, ClobberAbiNotSupported, | |
5 | InlineAsmUnsupportedTarget, InvalidAbiClobberAbi, InvalidAsmTemplateModifierConst, | |
c620b35d FG |
6 | InvalidAsmTemplateModifierLabel, InvalidAsmTemplateModifierRegClass, |
7 | InvalidAsmTemplateModifierRegClassSub, InvalidAsmTemplateModifierSym, InvalidRegister, | |
8 | InvalidRegisterClass, RegisterClassOnlyClobber, RegisterConflict, | |
f2b60f7d | 9 | }; |
17df50a5 XL |
10 | use super::LoweringContext; |
11 | ||
04454e1e | 12 | use rustc_ast::ptr::P; |
17df50a5 | 13 | use rustc_ast::*; |
487cf647 | 14 | use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap}; |
17df50a5 | 15 | use rustc_hir as hir; |
04454e1e | 16 | use rustc_hir::def::{DefKind, Res}; |
3c0e092e | 17 | use rustc_session::parse::feature_err; |
4b012472 | 18 | use rustc_span::symbol::kw; |
923072b8 | 19 | use rustc_span::{sym, Span}; |
17df50a5 XL |
20 | use rustc_target::asm; |
21 | use std::collections::hash_map::Entry; | |
22 | use std::fmt::Write; | |
23 | ||
impl<'a, 'hir> LoweringContext<'a, 'hir> {
    /// Lowers an AST `asm!`/`global_asm!` invocation to its HIR form.
    ///
    /// This performs all front-end validation of the assembly block that does
    /// not need type information: target-architecture support, feature gating
    /// (`asm_experimental_arch`, `asm_unwind`, `asm_const`, `asm_goto`),
    /// register/register-class parsing, template-modifier checks, explicit
    /// register conflict detection, and expansion of `clobber_abi` into
    /// late-output clobber operands. Errors are emitted via `self.dcx()` but
    /// lowering always proceeds, substituting `Err` register placeholders, so
    /// that a valid HIR node is produced even for invalid input.
    #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable
    pub(crate) fn lower_inline_asm(
        &mut self,
        sp: Span,
        asm: &InlineAsm,
    ) -> &'hir hir::InlineAsm<'hir> {
        // Rustdoc needs to support asm! from foreign architectures: don't try
        // lowering the register constraints in this case.
        let asm_arch =
            if self.tcx.sess.opts.actually_rustdoc { None } else { self.tcx.sess.asm_arch };
        if asm_arch.is_none() && !self.tcx.sess.opts.actually_rustdoc {
            self.dcx().emit_err(InlineAsmUnsupportedTarget { span: sp });
        }
        if let Some(asm_arch) = asm_arch {
            // Inline assembly is currently only stable for these architectures.
            let is_stable = matches!(
                asm_arch,
                asm::InlineAsmArch::X86
                    | asm::InlineAsmArch::X86_64
                    | asm::InlineAsmArch::Arm
                    | asm::InlineAsmArch::AArch64
                    | asm::InlineAsmArch::RiscV32
                    | asm::InlineAsmArch::RiscV64
                    | asm::InlineAsmArch::LoongArch64
            );
            if !is_stable && !self.tcx.features().asm_experimental_arch {
                feature_err(
                    &self.tcx.sess,
                    sym::asm_experimental_arch,
                    sp,
                    "inline assembly is not stable yet on this architecture",
                )
                .emit();
            }
        }
        // AT&T syntax is only meaningful on x86; skip the check under rustdoc
        // since the asm may target a foreign architecture.
        if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
            && !matches!(asm_arch, Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64))
            && !self.tcx.sess.opts.actually_rustdoc
        {
            self.dcx().emit_err(AttSyntaxOnlyX86 { span: sp });
        }
        if asm.options.contains(InlineAsmOptions::MAY_UNWIND) && !self.tcx.features().asm_unwind {
            feature_err(&self.tcx.sess, sym::asm_unwind, sp, "the `may_unwind` option is unstable")
                .emit();
        }

        // Parse each `clobber_abi("…")`, deduplicating by resolved ABI. The
        // map value keeps the original name/span for the duplicate diagnostic.
        let mut clobber_abis = FxIndexMap::default();
        if let Some(asm_arch) = asm_arch {
            for (abi_name, abi_span) in &asm.clobber_abis {
                match asm::InlineAsmClobberAbi::parse(asm_arch, &self.tcx.sess.target, *abi_name) {
                    Ok(abi) => {
                        // If the abi was already in the list, emit an error
                        match clobber_abis.get(&abi) {
                            Some((prev_name, prev_sp)) => {
                                // Multiple different abi names may actually be the same ABI
                                // If the specified ABIs are not the same name, alert the user that they resolve to the same ABI
                                let source_map = self.tcx.sess.source_map();
                                let equivalent = (source_map.span_to_snippet(*prev_sp)
                                    != source_map.span_to_snippet(*abi_span))
                                .then_some(());

                                self.dcx().emit_err(AbiSpecifiedMultipleTimes {
                                    abi_span: *abi_span,
                                    prev_name: *prev_name,
                                    prev_span: *prev_sp,
                                    equivalent,
                                });
                            }
                            None => {
                                clobber_abis.insert(abi, (*abi_name, *abi_span));
                            }
                        }
                    }
                    // An empty supported-ABI list means the target supports no
                    // clobber ABIs at all.
                    Err(&[]) => {
                        self.dcx().emit_err(ClobberAbiNotSupported { abi_span: *abi_span });
                    }
                    Err(supported_abis) => {
                        let mut abis = format!("`{}`", supported_abis[0]);
                        for m in &supported_abis[1..] {
                            let _ = write!(abis, ", `{m}`");
                        }
                        self.dcx().emit_err(InvalidAbiClobberAbi {
                            abi_span: *abi_span,
                            supported_abis: abis,
                        });
                    }
                }
            }
        }

        // Lower operands to HIR. We use dummy register classes if an error
        // occurs during lowering because we still need to be able to produce a
        // valid HIR.
        let sess = self.tcx.sess;
        let mut operands: Vec<_> = asm
            .operands
            .iter()
            .map(|(op, op_sp)| {
                // Parse an explicit register or register class for the current
                // architecture, falling back to the `Err` placeholder on
                // failure (or when there is no architecture, i.e. rustdoc).
                let lower_reg = |&reg: &_| match reg {
                    InlineAsmRegOrRegClass::Reg(reg) => {
                        asm::InlineAsmRegOrRegClass::Reg(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmReg::parse(asm_arch, reg).unwrap_or_else(|error| {
                                self.dcx().emit_err(InvalidRegister {
                                    op_span: *op_sp,
                                    reg,
                                    error,
                                });
                                asm::InlineAsmReg::Err
                            })
                        } else {
                            asm::InlineAsmReg::Err
                        })
                    }
                    InlineAsmRegOrRegClass::RegClass(reg_class) => {
                        asm::InlineAsmRegOrRegClass::RegClass(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmRegClass::parse(asm_arch, reg_class).unwrap_or_else(
                                |error| {
                                    self.dcx().emit_err(InvalidRegisterClass {
                                        op_span: *op_sp,
                                        reg_class,
                                        error,
                                    });
                                    asm::InlineAsmRegClass::Err
                                },
                            )
                        } else {
                            asm::InlineAsmRegClass::Err
                        })
                    }
                };

                let op = match op {
                    InlineAsmOperand::In { reg, expr } => hir::InlineAsmOperand::In {
                        reg: lower_reg(reg),
                        expr: self.lower_expr(expr),
                    },
                    InlineAsmOperand::Out { reg, late, expr } => hir::InlineAsmOperand::Out {
                        reg: lower_reg(reg),
                        late: *late,
                        expr: expr.as_ref().map(|expr| self.lower_expr(expr)),
                    },
                    InlineAsmOperand::InOut { reg, late, expr } => hir::InlineAsmOperand::InOut {
                        reg: lower_reg(reg),
                        late: *late,
                        expr: self.lower_expr(expr),
                    },
                    InlineAsmOperand::SplitInOut { reg, late, in_expr, out_expr } => {
                        hir::InlineAsmOperand::SplitInOut {
                            reg: lower_reg(reg),
                            late: *late,
                            in_expr: self.lower_expr(in_expr),
                            out_expr: out_expr.as_ref().map(|expr| self.lower_expr(expr)),
                        }
                    }
                    InlineAsmOperand::Const { anon_const } => {
                        if !self.tcx.features().asm_const {
                            feature_err(
                                sess,
                                sym::asm_const,
                                *op_sp,
                                "const operands for inline assembly are unstable",
                            )
                            .emit();
                        }
                        hir::InlineAsmOperand::Const {
                            anon_const: self.lower_anon_const(anon_const),
                        }
                    }
                    InlineAsmOperand::Sym { sym } => {
                        // `sym` resolving to a static lowers to `SymStatic`;
                        // anything else is treated as a function and wrapped in
                        // an anonymous constant to become `SymFn`.
                        let static_def_id = self
                            .resolver
                            .get_partial_res(sym.id)
                            .and_then(|res| res.full_res())
                            .and_then(|res| match res {
                                Res::Def(DefKind::Static { .. }, def_id) => Some(def_id),
                                _ => None,
                            });

                        if let Some(def_id) = static_def_id {
                            let path = self.lower_qpath(
                                sym.id,
                                &sym.qself,
                                &sym.path,
                                ParamMode::Optional,
                                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                                None,
                            );
                            hir::InlineAsmOperand::SymStatic { path, def_id }
                        } else {
                            // Replace the InlineAsmSym AST node with an
                            // Expr using the name node id.
                            let expr = Expr {
                                id: sym.id,
                                kind: ExprKind::Path(sym.qself.clone(), sym.path.clone()),
                                span: *op_sp,
                                attrs: AttrVec::new(),
                                tokens: None,
                            };

                            // Wrap the expression in an AnonConst.
                            let parent_def_id = self.current_hir_id_owner;
                            let node_id = self.next_node_id();
                            self.create_def(
                                parent_def_id.def_id,
                                node_id,
                                kw::Empty,
                                DefKind::AnonConst,
                                *op_sp,
                            );
                            let anon_const = AnonConst { id: node_id, value: P(expr) };
                            hir::InlineAsmOperand::SymFn {
                                anon_const: self.lower_anon_const(&anon_const),
                            }
                        }
                    }
                    InlineAsmOperand::Label { block } => {
                        if !self.tcx.features().asm_goto {
                            feature_err(
                                sess,
                                sym::asm_goto,
                                *op_sp,
                                "label operands for inline assembly are unstable",
                            )
                            .emit();
                        }
                        hir::InlineAsmOperand::Label { block: self.lower_block(block, false) }
                    }
                };
                (op, self.lower_span(*op_sp))
            })
            .collect();

        // Validate template modifiers against the register classes for the operands
        for p in &asm.template {
            if let InlineAsmTemplatePiece::Placeholder {
                operand_idx,
                modifier: Some(modifier),
                span: placeholder_span,
            } = *p
            {
                let op_sp = asm.operands[operand_idx].1;
                match &operands[operand_idx].0 {
                    hir::InlineAsmOperand::In { reg, .. }
                    | hir::InlineAsmOperand::Out { reg, .. }
                    | hir::InlineAsmOperand::InOut { reg, .. }
                    | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
                        let class = reg.reg_class();
                        // A parse error was already reported for `Err` classes;
                        // don't pile a modifier error on top of it.
                        if class == asm::InlineAsmRegClass::Err {
                            continue;
                        }
                        let valid_modifiers = class.valid_modifiers(asm_arch.unwrap());
                        if !valid_modifiers.contains(&modifier) {
                            let sub = if !valid_modifiers.is_empty() {
                                let mut mods = format!("`{}`", valid_modifiers[0]);
                                for m in &valid_modifiers[1..] {
                                    let _ = write!(mods, ", `{m}`");
                                }
                                InvalidAsmTemplateModifierRegClassSub::SupportModifier {
                                    class_name: class.name(),
                                    modifiers: mods,
                                }
                            } else {
                                InvalidAsmTemplateModifierRegClassSub::DoesNotSupportModifier {
                                    class_name: class.name(),
                                }
                            };
                            self.dcx().emit_err(InvalidAsmTemplateModifierRegClass {
                                placeholder_span,
                                op_span: op_sp,
                                sub,
                            });
                        }
                    }
                    hir::InlineAsmOperand::Const { .. } => {
                        self.dcx().emit_err(InvalidAsmTemplateModifierConst {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                    hir::InlineAsmOperand::SymFn { .. }
                    | hir::InlineAsmOperand::SymStatic { .. } => {
                        self.dcx().emit_err(InvalidAsmTemplateModifierSym {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                    hir::InlineAsmOperand::Label { .. } => {
                        self.dcx().emit_err(InvalidAsmTemplateModifierLabel {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                }
            }
        }

        // Maps from a hardware register to the index of the operand that
        // first used it, for input and output positions respectively.
        let mut used_input_regs = FxHashMap::default();
        let mut used_output_regs = FxHashMap::default();

        for (idx, &(ref op, op_sp)) in operands.iter().enumerate() {
            if let Some(reg) = op.reg() {
                let reg_class = reg.reg_class();
                if reg_class == asm::InlineAsmRegClass::Err {
                    continue;
                }

                // Some register classes can only be used as clobbers. This
                // means that we disallow passing a value in/out of the asm and
                // require that the operand name an explicit register, not a
                // register class.
                if reg_class.is_clobber_only(asm_arch.unwrap()) && !op.is_clobber() {
                    self.dcx().emit_err(RegisterClassOnlyClobber {
                        op_span: op_sp,
                        reg_class_name: reg_class.name(),
                    });
                    continue;
                }

                // Check for conflicts between explicit register operands.
                if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
                    let (input, output) = match op {
                        hir::InlineAsmOperand::In { .. } => (true, false),

                        // Late output do not conflict with inputs, but normal outputs do
                        hir::InlineAsmOperand::Out { late, .. } => (!late, true),

                        hir::InlineAsmOperand::InOut { .. }
                        | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),

                        hir::InlineAsmOperand::Const { .. }
                        | hir::InlineAsmOperand::SymFn { .. }
                        | hir::InlineAsmOperand::SymStatic { .. }
                        | hir::InlineAsmOperand::Label { .. } => {
                            unreachable!("{op:?} is not a register operand");
                        }
                    };

                    // Flag to output the error only once per operand
                    let mut skip = false;

                    let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
                                     input,
                                     r: asm::InlineAsmReg| {
                        match used_regs.entry(r) {
                            Entry::Occupied(o) => {
                                if skip {
                                    return;
                                }
                                skip = true;

                                let idx2 = *o.get();
                                let (ref op2, op_sp2) = operands[idx2];

                                // When an input and a non-late output share a
                                // register, point the note at the output span.
                                let in_out = match (op, op2) {
                                    (
                                        hir::InlineAsmOperand::In { .. },
                                        hir::InlineAsmOperand::Out { late, .. },
                                    )
                                    | (
                                        hir::InlineAsmOperand::Out { late, .. },
                                        hir::InlineAsmOperand::In { .. },
                                    ) => {
                                        assert!(!*late);
                                        let out_op_sp = if input { op_sp2 } else { op_sp };
                                        Some(out_op_sp)
                                    }
                                    _ => None,
                                };
                                let reg_str = |idx| -> &str {
                                    // HIR asm doesn't preserve the original alias string of the explicit register,
                                    // so we have to retrieve it from AST
                                    let (op, _): &(InlineAsmOperand, Span) = &asm.operands[idx];
                                    if let Some(ast::InlineAsmRegOrRegClass::Reg(reg_sym)) =
                                        op.reg()
                                    {
                                        reg_sym.as_str()
                                    } else {
                                        unreachable!("{op:?} is not a register operand");
                                    }
                                };

                                self.dcx().emit_err(RegisterConflict {
                                    op_span1: op_sp,
                                    op_span2: op_sp2,
                                    reg1_name: reg_str(idx),
                                    reg2_name: reg_str(idx2),
                                    in_out,
                                });
                            }
                            Entry::Vacant(v) => {
                                // Only record the register itself, not the
                                // registers it merely overlaps with.
                                if r == reg {
                                    v.insert(idx);
                                }
                            }
                        }
                    };
                    let mut overlapping_with = vec![];
                    reg.overlapping_regs(|r| {
                        overlapping_with.push(r);
                    });
                    for r in overlapping_with {
                        if input {
                            check(&mut used_input_regs, true, r);
                        }
                        if output {
                            check(&mut used_output_regs, false, r);
                        }
                    }
                }
            }
        }

        // If a clobber_abi is specified, add the necessary clobbers to the
        // operands list.
        let mut clobbered = FxHashSet::default();
        for (abi, (_, abi_span)) in clobber_abis {
            for &clobber in abi.clobbered_regs() {
                // Don't emit a clobber for a register already clobbered
                if clobbered.contains(&clobber) {
                    continue;
                }

                // Skip the clobber if an explicit output operand already uses
                // this register or one overlapping it.
                let mut overlapping_with = vec![];
                clobber.overlapping_regs(|reg| {
                    overlapping_with.push(reg);
                });
                let output_used =
                    overlapping_with.iter().any(|reg| used_output_regs.contains_key(&reg));

                if !output_used {
                    operands.push((
                        hir::InlineAsmOperand::Out {
                            reg: asm::InlineAsmRegOrRegClass::Reg(clobber),
                            late: true,
                            expr: None,
                        },
                        self.lower_span(abi_span),
                    ));
                    clobbered.insert(clobber);
                }
            }
        }

        // Move everything into the HIR arena and assemble the final node.
        let operands = self.arena.alloc_from_iter(operands);
        let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
        let template_strs = self.arena.alloc_from_iter(
            asm.template_strs
                .iter()
                .map(|(sym, snippet, span)| (*sym, *snippet, self.lower_span(*span))),
        );
        let line_spans =
            self.arena.alloc_from_iter(asm.line_spans.iter().map(|span| self.lower_span(*span)));
        let hir_asm =
            hir::InlineAsm { template, template_strs, operands, options: asm.options, line_spans };
        self.arena.alloc(hir_asm)
    }
}