]> git.proxmox.com Git - rustc.git/blob - compiler/rustc_ast_lowering/src/expr.rs
New upstream version 1.48.0~beta.8+dfsg1
[rustc.git] / compiler / rustc_ast_lowering / src / expr.rs
1 use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
2
3 use rustc_ast::attr;
4 use rustc_ast::ptr::P as AstP;
5 use rustc_ast::*;
6 use rustc_data_structures::fx::FxHashMap;
7 use rustc_data_structures::stack::ensure_sufficient_stack;
8 use rustc_data_structures::thin_vec::ThinVec;
9 use rustc_errors::struct_span_err;
10 use rustc_hir as hir;
11 use rustc_hir::def::Res;
12 use rustc_span::hygiene::ForLoopLoc;
13 use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
14 use rustc_span::symbol::{sym, Ident, Symbol};
15 use rustc_target::asm;
16 use std::collections::hash_map::Entry;
17 use std::fmt::Write;
18
19 impl<'hir> LoweringContext<'_, 'hir> {
    /// Lower a slice of AST expressions into an arena-allocated slice of HIR
    /// expressions, preserving order.
    fn lower_exprs(&mut self, exprs: &[AstP<Expr>]) -> &'hir [hir::Expr<'hir>] {
        self.arena.alloc_from_iter(exprs.iter().map(|x| self.lower_expr_mut(x)))
    }
23
    /// Lower a single AST expression and allocate the result in the arena,
    /// returning a reference with the HIR arena lifetime.
    pub(super) fn lower_expr(&mut self, e: &Expr) -> &'hir hir::Expr<'hir> {
        self.arena.alloc(self.lower_expr_mut(e))
    }
27
    /// Lower an AST expression into a HIR expression, returned by value.
    ///
    /// Simple kinds map one-to-one onto their HIR counterparts; control-flow
    /// and `async` forms are desugared by the dedicated `lower_expr_*` helpers
    /// below. `Paren` and `ForLoop` return early because they produce a
    /// complete `hir::Expr` themselves rather than just a `hir::ExprKind`.
    pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
        // Expression trees can be arbitrarily deep; grow the stack as needed
        // so pathological input does not overflow it.
        ensure_sufficient_stack(|| {
            let kind = match e.kind {
                ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
                ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
                ExprKind::Repeat(ref expr, ref count) => {
                    let expr = self.lower_expr(expr);
                    // The repeat count is an anonymous constant.
                    let count = self.lower_anon_const(count);
                    hir::ExprKind::Repeat(expr, count)
                }
                ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
                ExprKind::Call(ref f, ref args) => {
                    let f = self.lower_expr(f);
                    hir::ExprKind::Call(f, self.lower_exprs(args))
                }
                ExprKind::MethodCall(ref seg, ref args, span) => {
                    // Method calls carry a single path segment; parenthesized
                    // generic arguments (`Fn(..)`-style) are rejected here.
                    let hir_seg = self.arena.alloc(self.lower_path_segment(
                        e.span,
                        seg,
                        ParamMode::Optional,
                        0,
                        ParenthesizedGenericArgs::Err,
                        ImplTraitContext::disallowed(),
                        None,
                    ));
                    let args = self.lower_exprs(args);
                    hir::ExprKind::MethodCall(hir_seg, seg.ident.span, args, span)
                }
                ExprKind::Binary(binop, ref lhs, ref rhs) => {
                    let binop = self.lower_binop(binop);
                    let lhs = self.lower_expr(lhs);
                    let rhs = self.lower_expr(rhs);
                    hir::ExprKind::Binary(binop, lhs, rhs)
                }
                ExprKind::Unary(op, ref ohs) => {
                    let op = self.lower_unop(op);
                    let ohs = self.lower_expr(ohs);
                    hir::ExprKind::Unary(op, ohs)
                }
                ExprKind::Lit(ref l) => hir::ExprKind::Lit(respan(l.span, l.kind.clone())),
                ExprKind::Cast(ref expr, ref ty) => {
                    let expr = self.lower_expr(expr);
                    let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
                    hir::ExprKind::Cast(expr, ty)
                }
                ExprKind::Type(ref expr, ref ty) => {
                    let expr = self.lower_expr(expr);
                    let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
                    hir::ExprKind::Type(expr, ty)
                }
                ExprKind::AddrOf(k, m, ref ohs) => {
                    let ohs = self.lower_expr(ohs);
                    hir::ExprKind::AddrOf(k, m, ohs)
                }
                ExprKind::Let(ref pat, ref scrutinee) => {
                    // A bare `let` expression in an unsupported position;
                    // the helper emits an error and lowers to a `match`.
                    self.lower_expr_let(e.span, pat, scrutinee)
                }
                ExprKind::If(ref cond, ref then, ref else_opt) => {
                    self.lower_expr_if(e.span, cond, then, else_opt.as_deref())
                }
                // Both the condition and the body of a `while` are lowered
                // inside the loop scope so `break`/`continue` resolve to it.
                ExprKind::While(ref cond, ref body, opt_label) => self
                    .with_loop_scope(e.id, |this| {
                        this.lower_expr_while_in_loop_scope(e.span, cond, body, opt_label)
                    }),
                ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
                    hir::ExprKind::Loop(
                        this.lower_block(body, false),
                        opt_label,
                        hir::LoopSource::Loop,
                    )
                }),
                ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body),
                ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match(
                    self.lower_expr(expr),
                    self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
                    hir::MatchSource::Normal,
                ),
                // `async { .. }` blocks become generators wrapped so they
                // implement `Future`; see `make_async_expr`.
                ExprKind::Async(capture_clause, closure_node_id, ref block) => self
                    .make_async_expr(
                        capture_clause,
                        closure_node_id,
                        None,
                        block.span,
                        hir::AsyncGeneratorKind::Block,
                        |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
                    ),
                ExprKind::Await(ref expr) => self.lower_expr_await(e.span, expr),
                ExprKind::Closure(
                    capture_clause,
                    asyncness,
                    movability,
                    ref decl,
                    ref body,
                    fn_decl_span,
                ) => {
                    // `async` closures get their own lowering path.
                    if let Async::Yes { closure_id, .. } = asyncness {
                        self.lower_expr_async_closure(
                            capture_clause,
                            closure_id,
                            decl,
                            body,
                            fn_decl_span,
                        )
                    } else {
                        self.lower_expr_closure(
                            capture_clause,
                            movability,
                            decl,
                            body,
                            fn_decl_span,
                        )
                    }
                }
                ExprKind::Block(ref blk, opt_label) => {
                    hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
                }
                ExprKind::Assign(ref el, ref er, span) => {
                    hir::ExprKind::Assign(self.lower_expr(el), self.lower_expr(er), span)
                }
                ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp(
                    self.lower_binop(op),
                    self.lower_expr(el),
                    self.lower_expr(er),
                ),
                ExprKind::Field(ref el, ident) => hir::ExprKind::Field(self.lower_expr(el), ident),
                ExprKind::Index(ref el, ref er) => {
                    hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
                }
                // `start..=end` desugars to a `RangeInclusive::new` call …
                ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
                    self.lower_expr_range_closed(e.span, e1, e2)
                }
                // … while every other range form becomes a struct literal.
                ExprKind::Range(ref e1, ref e2, lims) => {
                    self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
                }
                ExprKind::Path(ref qself, ref path) => {
                    let qpath = self.lower_qpath(
                        e.id,
                        qself,
                        path,
                        ParamMode::Optional,
                        ImplTraitContext::disallowed(),
                    );
                    hir::ExprKind::Path(qpath)
                }
                ExprKind::Break(opt_label, ref opt_expr) => {
                    let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
                    hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr)
                }
                ExprKind::Continue(opt_label) => {
                    hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label))
                }
                ExprKind::Ret(ref e) => {
                    let e = e.as_ref().map(|x| self.lower_expr(x));
                    hir::ExprKind::Ret(e)
                }
                ExprKind::InlineAsm(ref asm) => self.lower_expr_asm(e.span, asm),
                ExprKind::LlvmInlineAsm(ref asm) => self.lower_expr_llvm_asm(asm),
                ExprKind::Struct(ref path, ref fields, ref maybe_expr) => {
                    // `maybe_expr` is the functional-update base (`..base`).
                    let maybe_expr = maybe_expr.as_ref().map(|x| self.lower_expr(x));
                    hir::ExprKind::Struct(
                        self.arena.alloc(self.lower_qpath(
                            e.id,
                            &None,
                            path,
                            ParamMode::Optional,
                            ImplTraitContext::disallowed(),
                        )),
                        self.arena.alloc_from_iter(fields.iter().map(|x| self.lower_field(x))),
                        maybe_expr,
                    )
                }
                ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
                ExprKind::Err => hir::ExprKind::Err,
                ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr),
                ExprKind::Paren(ref ex) => {
                    let mut ex = self.lower_expr_mut(ex);
                    // Include parens in span, but only if it is a super-span.
                    if e.span.contains(ex.span) {
                        ex.span = e.span;
                    }
                    // Merge attributes into the inner expression.
                    let mut attrs = e.attrs.clone();
                    attrs.extend::<Vec<_>>(ex.attrs.into());
                    ex.attrs = attrs;
                    // Early return: the inner expression *is* the result.
                    return ex;
                }

                // Desugar `ExprForLoop`
                // from: `[opt_ident]: for <pat> in <head> <body>`
                ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
                    return self.lower_expr_for(e, pat, head, body, opt_label);
                }
                // NOTE(review): macro calls are presumably expanded before
                // lowering; reaching this arm is a compiler bug.
                ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
            };

            // Common wrap-up: attach the lowered id, span, and attributes.
            hir::Expr {
                hir_id: self.lower_node_id(e.id),
                kind,
                span: e.span,
                attrs: e.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(),
            }
        })
    }
231
232 fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
233 match u {
234 UnOp::Deref => hir::UnOp::UnDeref,
235 UnOp::Not => hir::UnOp::UnNot,
236 UnOp::Neg => hir::UnOp::UnNeg,
237 }
238 }
239
240 fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
241 Spanned {
242 node: match b.node {
243 BinOpKind::Add => hir::BinOpKind::Add,
244 BinOpKind::Sub => hir::BinOpKind::Sub,
245 BinOpKind::Mul => hir::BinOpKind::Mul,
246 BinOpKind::Div => hir::BinOpKind::Div,
247 BinOpKind::Rem => hir::BinOpKind::Rem,
248 BinOpKind::And => hir::BinOpKind::And,
249 BinOpKind::Or => hir::BinOpKind::Or,
250 BinOpKind::BitXor => hir::BinOpKind::BitXor,
251 BinOpKind::BitAnd => hir::BinOpKind::BitAnd,
252 BinOpKind::BitOr => hir::BinOpKind::BitOr,
253 BinOpKind::Shl => hir::BinOpKind::Shl,
254 BinOpKind::Shr => hir::BinOpKind::Shr,
255 BinOpKind::Eq => hir::BinOpKind::Eq,
256 BinOpKind::Lt => hir::BinOpKind::Lt,
257 BinOpKind::Le => hir::BinOpKind::Le,
258 BinOpKind::Ne => hir::BinOpKind::Ne,
259 BinOpKind::Ge => hir::BinOpKind::Ge,
260 BinOpKind::Gt => hir::BinOpKind::Gt,
261 },
262 span: b.span,
263 }
264 }
265
    /// Emit an error and lower `ast::ExprKind::Let(pat, scrutinee)` into:
    /// ```rust
    /// match scrutinee { pats => true, _ => false }
    /// ```
    ///
    /// Reaching this function means the `let` expression appeared somewhere
    /// it is not allowed; the error wording differs between nightly and
    /// non-nightly builds.
    fn lower_expr_let(&mut self, span: Span, pat: &Pat, scrutinee: &Expr) -> hir::ExprKind<'hir> {
        // If we got here, the `let` expression is not allowed.

        if self.sess.opts.unstable_features.is_nightly_build() {
            // Nightly: explain where `let` expressions *are* supported.
            self.sess
                .struct_span_err(span, "`let` expressions are not supported here")
                .note("only supported directly in conditions of `if`- and `while`-expressions")
                .note("as well as when nested within `&&` and parenthesis in those conditions")
                .emit();
        } else {
            // Stable: report it as a plain statement-in-expression error.
            self.sess
                .struct_span_err(span, "expected expression, found statement (`let`)")
                .note("variable declaration using `let` is a statement")
                .emit();
        }

        // For better recovery, we emit:
        // ```
        // match scrutinee { pat => true, _ => false }
        // ```
        // While this doesn't fully match the user's intent, it has key advantages:
        // 1. We can avoid using `abort_if_errors`.
        // 2. We can typeck both `pat` and `scrutinee`.
        // 3. `pat` is allowed to be refutable.
        // 4. The return type of the block is `bool` which seems like what the user wanted.
        let scrutinee = self.lower_expr(scrutinee);
        // `pat => true`
        let then_arm = {
            let pat = self.lower_pat(pat);
            let expr = self.expr_bool(span, true);
            self.arm(pat, expr)
        };
        // `_ => false`
        let else_arm = {
            let pat = self.pat_wild(span);
            let expr = self.expr_bool(span, false);
            self.arm(pat, expr)
        };
        hir::ExprKind::Match(
            scrutinee,
            arena_vec![self; then_arm, else_arm],
            hir::MatchSource::Normal,
        )
    }
312
    /// Lower `if <cond> <then> [else <els>]` by desugaring it into a `match`.
    ///
    /// `if let <pat> = <scrutinee>` becomes
    /// `match <scrutinee> { <pat> => <then>, _ => <els> }` (`IfLetDesugar`);
    /// a boolean condition becomes
    /// `match drop-temps { <cond> } { true => <then>, _ => <els> }`
    /// (`IfDesugar`). A missing `else` produces an empty block arm.
    fn lower_expr_if(
        &mut self,
        span: Span,
        cond: &Expr,
        then: &Block,
        else_opt: Option<&Expr>,
    ) -> hir::ExprKind<'hir> {
        // FIXME(#53667): handle lowering of && and parens.

        // `_ => else_block` where `else_block` is `{}` if there's `None`:
        let else_pat = self.pat_wild(span);
        let (else_expr, contains_else_clause) = match else_opt {
            None => (self.expr_block_empty(span), false),
            Some(els) => (self.lower_expr(els), true),
        };
        let else_arm = self.arm(else_pat, else_expr);

        // Handle then + scrutinee:
        let then_expr = self.lower_block_expr(then);
        let (then_pat, scrutinee, desugar) = match cond.kind {
            // `<pat> => <then>`:
            ExprKind::Let(ref pat, ref scrutinee) => {
                let scrutinee = self.lower_expr(scrutinee);
                let pat = self.lower_pat(pat);
                (pat, scrutinee, hir::MatchSource::IfLetDesugar { contains_else_clause })
            }
            // `true => <then>`:
            _ => {
                // Lower condition:
                let cond = self.lower_expr(cond);
                let span_block =
                    self.mark_span_with_reason(DesugaringKind::CondTemporary, cond.span, None);
                // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
                // to preserve drop semantics since `if cond { ... }` does not
                // let temporaries live outside of `cond`.
                let cond = self.expr_drop_temps(span_block, cond, ThinVec::new());
                let pat = self.pat_bool(span, true);
                (pat, cond, hir::MatchSource::IfDesugar { contains_else_clause })
            }
        };
        let then_arm = self.arm(then_pat, self.arena.alloc(then_expr));

        hir::ExprKind::Match(scrutinee, arena_vec![self; then_arm, else_arm], desugar)
    }
357
    /// Lower `[opt_label]: while <cond> <body>` — already inside its loop
    /// scope — into `loop { match <scrutinee> { <pat> => <body>, _ => break } }`.
    ///
    /// `while let` matches the pattern directly; a boolean condition is
    /// wrapped in drop-temps and matched against `true`.
    fn lower_expr_while_in_loop_scope(
        &mut self,
        span: Span,
        cond: &Expr,
        body: &Block,
        opt_label: Option<Label>,
    ) -> hir::ExprKind<'hir> {
        // FIXME(#53667): handle lowering of && and parens.

        // Note that the block AND the condition are evaluated in the loop scope.
        // This is done to allow `break` from inside the condition of the loop.

        // `_ => break`:
        let else_arm = {
            let else_pat = self.pat_wild(span);
            let else_expr = self.expr_break(span, ThinVec::new());
            self.arm(else_pat, else_expr)
        };

        // Handle then + scrutinee:
        let then_expr = self.lower_block_expr(body);
        let (then_pat, scrutinee, desugar, source) = match cond.kind {
            ExprKind::Let(ref pat, ref scrutinee) => {
                // to:
                //
                //   [opt_ident]: loop {
                //     match <sub_expr> {
                //       <pat> => <body>,
                //       _ => break
                //     }
                //   }
                let scrutinee = self.with_loop_condition_scope(|t| t.lower_expr(scrutinee));
                let pat = self.lower_pat(pat);
                (pat, scrutinee, hir::MatchSource::WhileLetDesugar, hir::LoopSource::WhileLet)
            }
            _ => {
                // We desugar: `'label: while $cond $body` into:
                //
                // ```
                // 'label: loop {
                //     match drop-temps { $cond } {
                //         true => $body,
                //         _ => break,
                //     }
                // }
                // ```

                // Lower condition:
                let cond = self.with_loop_condition_scope(|this| this.lower_expr(cond));
                let span_block =
                    self.mark_span_with_reason(DesugaringKind::CondTemporary, cond.span, None);
                // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
                // to preserve drop semantics since `while cond { ... }` does not
                // let temporaries live outside of `cond`.
                let cond = self.expr_drop_temps(span_block, cond, ThinVec::new());
                // `true => <then>`:
                let pat = self.pat_bool(span, true);
                (pat, cond, hir::MatchSource::WhileDesugar, hir::LoopSource::While)
            }
        };
        let then_arm = self.arm(then_pat, self.arena.alloc(then_expr));

        // `match <scrutinee> { ... }`
        let match_expr =
            self.expr_match(span, scrutinee, arena_vec![self; then_arm, else_arm], desugar);

        // `[opt_ident]: loop { ... }`
        hir::ExprKind::Loop(self.block_expr(self.arena.alloc(match_expr)), opt_label, source)
    }
427
    /// Desugar `try { <stmts>; <expr> }` into `{ <stmts>; ::std::ops::Try::from_ok(<expr>) }`,
    /// `try { <stmts>; }` into `{ <stmts>; ::std::ops::Try::from_ok(()) }`
    /// and save the block id to use it as a break target for desugaring of the `?` operator.
    fn lower_expr_try_block(&mut self, body: &Block) -> hir::ExprKind<'hir> {
        // The catch scope makes the block id visible to `?` lowering.
        self.with_catch_scope(body.id, |this| {
            let mut block = this.lower_block_noalloc(body, true);

            let try_span = this.mark_span_with_reason(
                DesugaringKind::TryBlock,
                body.span,
                this.allow_try_trait.clone(),
            );

            // Final expression of the block (if present) or `()` with span at the end of block
            let tail_expr = block
                .expr
                .take()
                .unwrap_or_else(|| this.expr_unit(this.sess.source_map().end_point(try_span)));

            let ok_wrapped_span =
                this.mark_span_with_reason(DesugaringKind::TryBlock, tail_expr.span, None);

            // `::std::ops::Try::from_ok($tail_expr)`
            block.expr = Some(this.wrap_in_try_constructor(
                hir::LangItem::TryFromOk,
                try_span,
                tail_expr,
                ok_wrapped_span,
            ));

            hir::ExprKind::Block(this.arena.alloc(block), None)
        })
    }
461
462 fn wrap_in_try_constructor(
463 &mut self,
464 lang_item: hir::LangItem,
465 method_span: Span,
466 expr: &'hir hir::Expr<'hir>,
467 overall_span: Span,
468 ) -> &'hir hir::Expr<'hir> {
469 let constructor =
470 self.arena.alloc(self.expr_lang_item_path(method_span, lang_item, ThinVec::new()));
471 self.expr_call(overall_span, constructor, std::slice::from_ref(expr))
472 }
473
474 fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
475 hir::Arm {
476 hir_id: self.next_id(),
477 attrs: self.lower_attrs(&arm.attrs),
478 pat: self.lower_pat(&arm.pat),
479 guard: match arm.guard {
480 Some(ref x) => Some(hir::Guard::If(self.lower_expr(x))),
481 _ => None,
482 },
483 body: self.lower_expr(&arm.body),
484 span: arm.span,
485 }
486 }
487
    /// Lower an `async` construct to a generator that is then wrapped so it implements `Future`.
    ///
    /// This results in:
    ///
    /// ```text
    /// std::future::from_generator(static move? |_task_context| -> <ret_ty> {
    ///     <body>
    /// })
    /// ```
    ///
    /// The `_task_context` parameter is re-used by `.await` lowering; `ret_ty`
    /// of `None` means the default (inferred) return type.
    pub(super) fn make_async_expr(
        &mut self,
        capture_clause: CaptureBy,
        closure_node_id: NodeId,
        ret_ty: Option<AstP<Ty>>,
        span: Span,
        async_gen_kind: hir::AsyncGeneratorKind,
        body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
    ) -> hir::ExprKind<'hir> {
        let output = match ret_ty {
            Some(ty) => hir::FnRetTy::Return(self.lower_ty(&ty, ImplTraitContext::disallowed())),
            None => hir::FnRetTy::DefaultReturn(span),
        };

        // Resume argument type. We let the compiler infer this to simplify the lowering. It is
        // fully constrained by `future::from_generator`.
        let input_ty = hir::Ty { hir_id: self.next_id(), kind: hir::TyKind::Infer, span };

        // The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
        let decl = self.arena.alloc(hir::FnDecl {
            inputs: arena_vec![self; input_ty],
            output,
            c_variadic: false,
            implicit_self: hir::ImplicitSelfKind::None,
        });

        // Lower the argument pattern/ident. The ident is used again in the `.await` lowering.
        let (pat, task_context_hid) = self.pat_ident_binding_mode(
            span,
            Ident::with_dummy_span(sym::_task_context),
            hir::BindingAnnotation::Mutable,
        );
        let param = hir::Param { attrs: &[], hir_id: self.next_id(), pat, ty_span: span, span };
        let params = arena_vec![self; param];

        let body_id = self.lower_body(move |this| {
            this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind));

            // Save/restore `task_context` around the body so nested async
            // constructs each see their own resume argument.
            let old_ctx = this.task_context;
            this.task_context = Some(task_context_hid);
            let res = body(this);
            this.task_context = old_ctx;
            (params, res)
        });

        // `static |_task_context| -> <ret_ty> { body }`:
        let generator_kind = hir::ExprKind::Closure(
            capture_clause,
            decl,
            body_id,
            span,
            Some(hir::Movability::Static),
        );
        let generator = hir::Expr {
            hir_id: self.lower_node_id(closure_node_id),
            kind: generator_kind,
            span,
            attrs: ThinVec::new(),
        };

        // `future::from_generator`:
        let unstable_span =
            self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
        let gen_future =
            self.expr_lang_item_path(unstable_span, hir::LangItem::FromGenerator, ThinVec::new());

        // `future::from_generator(generator)`:
        hir::ExprKind::Call(self.arena.alloc(gen_future), arena_vec![self; generator])
    }
566
    /// Desugar `<expr>.await` into:
    /// ```rust
    /// match <expr> {
    ///     mut pinned => loop {
    ///         match unsafe { ::std::future::Future::poll(
    ///             <::std::pin::Pin>::new_unchecked(&mut pinned),
    ///             ::std::future::get_context(task_context),
    ///         ) } {
    ///             ::std::task::Poll::Ready(result) => break result,
    ///             ::std::task::Poll::Pending => {}
    ///         }
    ///         task_context = yield ();
    ///     }
    /// }
    /// ```
    ///
    /// Using `.await` outside an `async` context emits E0728 but still
    /// produces lowered HIR so type checking can proceed.
    fn lower_expr_await(&mut self, await_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
        // `.await` is only legal inside an `async` generator.
        match self.generator_kind {
            Some(hir::GeneratorKind::Async(_)) => {}
            Some(hir::GeneratorKind::Gen) | None => {
                let mut err = struct_span_err!(
                    self.sess,
                    await_span,
                    E0728,
                    "`await` is only allowed inside `async` functions and blocks"
                );
                err.span_label(await_span, "only allowed inside `async` functions and blocks");
                if let Some(item_sp) = self.current_item {
                    err.span_label(item_sp, "this is not `async`");
                }
                err.emit();
            }
        }
        let span = self.mark_span_with_reason(DesugaringKind::Await, await_span, None);
        let gen_future_span = self.mark_span_with_reason(
            DesugaringKind::Await,
            await_span,
            self.allow_gen_future.clone(),
        );
        let expr = self.lower_expr(expr);

        // `mut pinned` — the binding that holds the future being polled.
        let pinned_ident = Ident::with_dummy_span(sym::pinned);
        let (pinned_pat, pinned_pat_hid) =
            self.pat_ident_binding_mode(span, pinned_ident, hir::BindingAnnotation::Mutable);

        let task_context_ident = Ident::with_dummy_span(sym::_task_context);

        // unsafe {
        //     ::std::future::Future::poll(
        //         ::std::pin::Pin::new_unchecked(&mut pinned),
        //         ::std::future::get_context(task_context),
        //     )
        // }
        let poll_expr = {
            let pinned = self.expr_ident(span, pinned_ident, pinned_pat_hid);
            let ref_mut_pinned = self.expr_mut_addr_of(span, pinned);
            let task_context = if let Some(task_context_hid) = self.task_context {
                self.expr_ident_mut(span, task_context_ident, task_context_hid)
            } else {
                // Use of `await` outside of an async context, we cannot use `task_context` here.
                self.expr_err(span)
            };
            let new_unchecked = self.expr_call_lang_item_fn_mut(
                span,
                hir::LangItem::PinNewUnchecked,
                arena_vec![self; ref_mut_pinned],
            );
            let get_context = self.expr_call_lang_item_fn_mut(
                gen_future_span,
                hir::LangItem::GetContext,
                arena_vec![self; task_context],
            );
            let call = self.expr_call_lang_item_fn(
                span,
                hir::LangItem::FuturePoll,
                arena_vec![self; new_unchecked, get_context],
            );
            self.arena.alloc(self.expr_unsafe(call))
        };

        // `::std::task::Poll::Ready(result) => break result`
        let loop_node_id = self.resolver.next_node_id();
        let loop_hir_id = self.lower_node_id(loop_node_id);
        let ready_arm = {
            let x_ident = Ident::with_dummy_span(sym::result);
            let (x_pat, x_pat_hid) = self.pat_ident(span, x_ident);
            let x_expr = self.expr_ident(span, x_ident, x_pat_hid);
            let ready_field = self.single_pat_field(span, x_pat);
            let ready_pat = self.pat_lang_item_variant(span, hir::LangItem::PollReady, ready_field);
            // The `break` targets the desugaring's own loop, entered above.
            let break_x = self.with_loop_scope(loop_node_id, move |this| {
                let expr_break =
                    hir::ExprKind::Break(this.lower_loop_destination(None), Some(x_expr));
                this.arena.alloc(this.expr(await_span, expr_break, ThinVec::new()))
            });
            self.arm(ready_pat, break_x)
        };

        // `::std::task::Poll::Pending => {}`
        let pending_arm = {
            let pending_pat = self.pat_lang_item_variant(span, hir::LangItem::PollPending, &[]);
            let empty_block = self.expr_block_empty(span);
            self.arm(pending_pat, empty_block)
        };

        let inner_match_stmt = {
            let match_expr = self.expr_match(
                span,
                poll_expr,
                arena_vec![self; ready_arm, pending_arm],
                hir::MatchSource::AwaitDesugar,
            );
            self.stmt_expr(span, match_expr)
        };

        // task_context = yield ();
        let yield_stmt = {
            let unit = self.expr_unit(span);
            let yield_expr = self.expr(
                span,
                hir::ExprKind::Yield(unit, hir::YieldSource::Await { expr: Some(expr.hir_id) }),
                ThinVec::new(),
            );
            let yield_expr = self.arena.alloc(yield_expr);

            if let Some(task_context_hid) = self.task_context {
                let lhs = self.expr_ident(span, task_context_ident, task_context_hid);
                let assign =
                    self.expr(span, hir::ExprKind::Assign(lhs, yield_expr, span), AttrVec::new());
                self.stmt_expr(span, assign)
            } else {
                // Use of `await` outside of an async context. Return `yield_expr` so that we can
                // proceed with type checking.
                self.stmt(span, hir::StmtKind::Semi(yield_expr))
            }
        };

        let loop_block = self.block_all(span, arena_vec![self; inner_match_stmt, yield_stmt], None);

        // loop { .. }
        let loop_expr = self.arena.alloc(hir::Expr {
            hir_id: loop_hir_id,
            kind: hir::ExprKind::Loop(loop_block, None, hir::LoopSource::Loop),
            span,
            attrs: ThinVec::new(),
        });

        // mut pinned => loop { ... }
        let pinned_arm = self.arm(pinned_pat, loop_expr);

        // match <expr> {
        //     mut pinned => loop { .. }
        // }
        hir::ExprKind::Match(expr, arena_vec![self; pinned_arm], hir::MatchSource::AwaitDesugar)
    }
720
    /// Lower a non-`async` closure expression, detecting along the way whether
    /// the body turned out to be a generator (i.e. contained `yield`).
    fn lower_expr_closure(
        &mut self,
        capture_clause: CaptureBy,
        movability: Movability,
        decl: &FnDecl,
        body: &Expr,
        fn_decl_span: Span,
    ) -> hir::ExprKind<'hir> {
        // Lower outside new scope to preserve `is_in_loop_condition`.
        let fn_decl = self.lower_fn_decl(decl, None, false, None);

        self.with_new_scopes(move |this| {
            let prev = this.current_item;
            this.current_item = Some(fn_decl_span);
            let mut generator_kind = None;
            let body_id = this.lower_fn_body(decl, |this| {
                let e = this.lower_expr_mut(body);
                // Capture whether lowering the body set a generator kind.
                generator_kind = this.generator_kind;
                e
            });
            // Validates the parameter count / `static` usage and yields the
            // movability to record on the closure, if it is a generator.
            let generator_option =
                this.generator_movability_for_fn(&decl, fn_decl_span, generator_kind, movability);
            this.current_item = prev;
            hir::ExprKind::Closure(capture_clause, fn_decl, body_id, fn_decl_span, generator_option)
        })
    }
747
748 fn generator_movability_for_fn(
749 &mut self,
750 decl: &FnDecl,
751 fn_decl_span: Span,
752 generator_kind: Option<hir::GeneratorKind>,
753 movability: Movability,
754 ) -> Option<hir::Movability> {
755 match generator_kind {
756 Some(hir::GeneratorKind::Gen) => {
757 if decl.inputs.len() > 1 {
758 struct_span_err!(
759 self.sess,
760 fn_decl_span,
761 E0628,
762 "too many parameters for a generator (expected 0 or 1 parameters)"
763 )
764 .emit();
765 }
766 Some(movability)
767 }
768 Some(hir::GeneratorKind::Async(_)) => {
769 panic!("non-`async` closure body turned `async` during lowering");
770 }
771 None => {
772 if movability == Movability::Static {
773 struct_span_err!(self.sess, fn_decl_span, E0697, "closures cannot be static")
774 .emit();
775 }
776 None
777 }
778 }
779 }
780
    /// Lower an `async` closure: the outer closure keeps the original
    /// parameters (with a defaulted return type), and its body wraps the
    /// user's body in an async generator via `make_async_expr`.
    fn lower_expr_async_closure(
        &mut self,
        capture_clause: CaptureBy,
        closure_id: NodeId,
        decl: &FnDecl,
        body: &Expr,
        fn_decl_span: Span,
    ) -> hir::ExprKind<'hir> {
        // The outer closure's declared return type is always defaulted; the
        // user's return type (if any) moves onto the inner async block below.
        let outer_decl =
            FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };
        // We need to lower the declaration outside the new scope, because we
        // have to conserve the state of being inside a loop condition for the
        // closure argument types.
        let fn_decl = self.lower_fn_decl(&outer_decl, None, false, None);

        self.with_new_scopes(move |this| {
            // FIXME(cramertj): allow `async` non-`move` closures with arguments.
            if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
                struct_span_err!(
                    this.sess,
                    fn_decl_span,
                    E0708,
                    "`async` non-`move` closures with parameters are not currently supported",
                )
                .help(
                    "consider using `let` statements to manually capture \
                    variables by reference before entering an `async move` closure",
                )
                .emit();
            }

            // Transform `async |x: u8| -> X { ... }` into
            // `|x: u8| future_from_generator(|| -> X { ... })`.
            let body_id = this.lower_fn_body(&outer_decl, |this| {
                let async_ret_ty =
                    if let FnRetTy::Ty(ty) = &decl.output { Some(ty.clone()) } else { None };
                let async_body = this.make_async_expr(
                    capture_clause,
                    closure_id,
                    async_ret_ty,
                    body.span,
                    hir::AsyncGeneratorKind::Closure,
                    |this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
                );
                this.expr(fn_decl_span, async_body, ThinVec::new())
            });
            hir::ExprKind::Closure(capture_clause, fn_decl, body_id, fn_decl_span, None)
        })
    }
830
831 /// Desugar `<start>..=<end>` into `std::ops::RangeInclusive::new(<start>, <end>)`.
832 fn lower_expr_range_closed(&mut self, span: Span, e1: &Expr, e2: &Expr) -> hir::ExprKind<'hir> {
833 let e1 = self.lower_expr_mut(e1);
834 let e2 = self.lower_expr_mut(e2);
835 let fn_path = hir::QPath::LangItem(hir::LangItem::RangeInclusiveNew, span);
836 let fn_expr =
837 self.arena.alloc(self.expr(span, hir::ExprKind::Path(fn_path), ThinVec::new()));
838 hir::ExprKind::Call(fn_expr, arena_vec![self; e1, e2])
839 }
840
841 fn lower_expr_range(
842 &mut self,
843 span: Span,
844 e1: Option<&Expr>,
845 e2: Option<&Expr>,
846 lims: RangeLimits,
847 ) -> hir::ExprKind<'hir> {
848 use rustc_ast::RangeLimits::*;
849
850 let lang_item = match (e1, e2, lims) {
851 (None, None, HalfOpen) => hir::LangItem::RangeFull,
852 (Some(..), None, HalfOpen) => hir::LangItem::RangeFrom,
853 (None, Some(..), HalfOpen) => hir::LangItem::RangeTo,
854 (Some(..), Some(..), HalfOpen) => hir::LangItem::Range,
855 (None, Some(..), Closed) => hir::LangItem::RangeToInclusive,
856 (Some(..), Some(..), Closed) => unreachable!(),
857 (_, None, Closed) => {
858 self.diagnostic().span_fatal(span, "inclusive range with no end").raise()
859 }
860 };
861
862 let fields = self.arena.alloc_from_iter(
863 e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e))).map(|(s, e)| {
864 let expr = self.lower_expr(&e);
865 let ident = Ident::new(Symbol::intern(s), e.span);
866 self.field(ident, expr, e.span)
867 }),
868 );
869
870 hir::ExprKind::Struct(self.arena.alloc(hir::QPath::LangItem(lang_item, span)), fields, None)
871 }
872
873 fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>) -> hir::Destination {
874 let target_id = match destination {
875 Some((id, _)) => {
876 if let Some(loop_id) = self.resolver.get_label_res(id) {
877 Ok(self.lower_node_id(loop_id))
878 } else {
879 Err(hir::LoopIdError::UnresolvedLabel)
880 }
881 }
882 None => self
883 .loop_scopes
884 .last()
885 .cloned()
886 .map(|id| Ok(self.lower_node_id(id)))
887 .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)),
888 };
889 hir::Destination { label: destination.map(|(_, label)| label), target_id }
890 }
891
892 fn lower_jump_destination(&mut self, id: NodeId, opt_label: Option<Label>) -> hir::Destination {
893 if self.is_in_loop_condition && opt_label.is_none() {
894 hir::Destination {
895 label: None,
896 target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition),
897 }
898 } else {
899 self.lower_loop_destination(opt_label.map(|label| (id, label)))
900 }
901 }
902
903 fn with_catch_scope<T>(&mut self, catch_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
904 let len = self.catch_scopes.len();
905 self.catch_scopes.push(catch_id);
906
907 let result = f(self);
908 assert_eq!(
909 len + 1,
910 self.catch_scopes.len(),
911 "catch scopes should be added and removed in stack order"
912 );
913
914 self.catch_scopes.pop().unwrap();
915
916 result
917 }
918
919 fn with_loop_scope<T>(&mut self, loop_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
920 // We're no longer in the base loop's condition; we're in another loop.
921 let was_in_loop_condition = self.is_in_loop_condition;
922 self.is_in_loop_condition = false;
923
924 let len = self.loop_scopes.len();
925 self.loop_scopes.push(loop_id);
926
927 let result = f(self);
928 assert_eq!(
929 len + 1,
930 self.loop_scopes.len(),
931 "loop scopes should be added and removed in stack order"
932 );
933
934 self.loop_scopes.pop().unwrap();
935
936 self.is_in_loop_condition = was_in_loop_condition;
937
938 result
939 }
940
941 fn with_loop_condition_scope<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
942 let was_in_loop_condition = self.is_in_loop_condition;
943 self.is_in_loop_condition = true;
944
945 let result = f(self);
946
947 self.is_in_loop_condition = was_in_loop_condition;
948
949 result
950 }
951
    /// Lowers an `asm!` expression to HIR, validating it along the way.
    ///
    /// Emits diagnostics for: unsupported target architectures, the
    /// `att_syntax` option on non-x86 targets, invalid register / register
    /// class names, template modifiers that do not match the operand's
    /// register class, register classes unavailable under the enabled target
    /// features, and conflicting explicit register operands. Returns
    /// `hir::ExprKind::Err` if any operand failed to lower.
    fn lower_expr_asm(&mut self, sp: Span, asm: &InlineAsm) -> hir::ExprKind<'hir> {
        if self.sess.asm_arch.is_none() {
            struct_span_err!(self.sess, sp, E0472, "asm! is unsupported on this target").emit();
        }
        if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
            && !matches!(
                self.sess.asm_arch,
                Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64)
            )
        {
            self.sess
                .struct_span_err(sp, "the `att_syntax` option is only supported on x86")
                .emit();
        }

        // Lower operands to HIR, filter_map skips any operands with invalid
        // register classes.
        // `sess` is copied out so the `filter_map` closure below can use it
        // while `self` is also borrowed for expression lowering.
        let sess = self.sess;
        let operands: Vec<_> = asm
            .operands
            .iter()
            .filter_map(|(op, op_sp)| {
                // Parses an explicit register or register class name,
                // emitting a diagnostic and returning `None` on failure.
                let lower_reg = |reg| {
                    Some(match reg {
                        InlineAsmRegOrRegClass::Reg(s) => asm::InlineAsmRegOrRegClass::Reg(
                            asm::InlineAsmReg::parse(
                                sess.asm_arch?,
                                |feature| sess.target_features.contains(&Symbol::intern(feature)),
                                &sess.target.target,
                                s,
                            )
                            .map_err(|e| {
                                let msg = format!("invalid register `{}`: {}", s.as_str(), e);
                                sess.struct_span_err(*op_sp, &msg).emit();
                            })
                            .ok()?,
                        ),
                        InlineAsmRegOrRegClass::RegClass(s) => {
                            asm::InlineAsmRegOrRegClass::RegClass(
                                asm::InlineAsmRegClass::parse(sess.asm_arch?, s)
                                    .map_err(|e| {
                                        let msg = format!(
                                            "invalid register class `{}`: {}",
                                            s.as_str(),
                                            e
                                        );
                                        sess.struct_span_err(*op_sp, &msg).emit();
                                    })
                                    .ok()?,
                            )
                        }
                    })
                };

                // lower_reg is executed last because we need to lower all
                // sub-expressions even if we throw them away later.
                let op = match *op {
                    InlineAsmOperand::In { reg, ref expr } => hir::InlineAsmOperand::In {
                        expr: self.lower_expr_mut(expr),
                        reg: lower_reg(reg)?,
                    },
                    InlineAsmOperand::Out { reg, late, ref expr } => hir::InlineAsmOperand::Out {
                        late,
                        expr: expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
                        reg: lower_reg(reg)?,
                    },
                    InlineAsmOperand::InOut { reg, late, ref expr } => {
                        hir::InlineAsmOperand::InOut {
                            late,
                            expr: self.lower_expr_mut(expr),
                            reg: lower_reg(reg)?,
                        }
                    }
                    InlineAsmOperand::SplitInOut { reg, late, ref in_expr, ref out_expr } => {
                        hir::InlineAsmOperand::SplitInOut {
                            late,
                            in_expr: self.lower_expr_mut(in_expr),
                            out_expr: out_expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
                            reg: lower_reg(reg)?,
                        }
                    }
                    InlineAsmOperand::Const { ref expr } => {
                        hir::InlineAsmOperand::Const { expr: self.lower_expr_mut(expr) }
                    }
                    InlineAsmOperand::Sym { ref expr } => {
                        hir::InlineAsmOperand::Sym { expr: self.lower_expr_mut(expr) }
                    }
                };
                Some(op)
            })
            .collect();

        // Stop if there were any errors when lowering the register classes
        if operands.len() != asm.operands.len() || sess.asm_arch.is_none() {
            return hir::ExprKind::Err;
        }

        // Validate template modifiers against the register classes for the operands
        let asm_arch = sess.asm_arch.unwrap();
        for p in &asm.template {
            if let InlineAsmTemplatePiece::Placeholder {
                operand_idx,
                modifier: Some(modifier),
                span: placeholder_span,
            } = *p
            {
                let op_sp = asm.operands[operand_idx].1;
                match &operands[operand_idx] {
                    hir::InlineAsmOperand::In { reg, .. }
                    | hir::InlineAsmOperand::Out { reg, .. }
                    | hir::InlineAsmOperand::InOut { reg, .. }
                    | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
                        let class = reg.reg_class();
                        let valid_modifiers = class.valid_modifiers(asm_arch);
                        if !valid_modifiers.contains(&modifier) {
                            let mut err = sess.struct_span_err(
                                placeholder_span,
                                "invalid asm template modifier for this register class",
                            );
                            err.span_label(placeholder_span, "template modifier");
                            err.span_label(op_sp, "argument");
                            if !valid_modifiers.is_empty() {
                                // List the modifiers that *would* be accepted.
                                let mut mods = format!("`{}`", valid_modifiers[0]);
                                for m in &valid_modifiers[1..] {
                                    let _ = write!(mods, ", `{}`", m);
                                }
                                err.note(&format!(
                                    "the `{}` register class supports \
                                     the following template modifiers: {}",
                                    class.name(),
                                    mods
                                ));
                            } else {
                                err.note(&format!(
                                    "the `{}` register class does not support template modifiers",
                                    class.name()
                                ));
                            }
                            err.emit();
                        }
                    }
                    hir::InlineAsmOperand::Const { .. } => {
                        let mut err = sess.struct_span_err(
                            placeholder_span,
                            "asm template modifiers are not allowed for `const` arguments",
                        );
                        err.span_label(placeholder_span, "template modifier");
                        err.span_label(op_sp, "argument");
                        err.emit();
                    }
                    hir::InlineAsmOperand::Sym { .. } => {
                        let mut err = sess.struct_span_err(
                            placeholder_span,
                            "asm template modifiers are not allowed for `sym` arguments",
                        );
                        err.span_label(placeholder_span, "template modifier");
                        err.span_label(op_sp, "argument");
                        err.emit();
                    }
                }
            }
        }

        // Maps from a hardware register to the operand index that claimed it,
        // tracked separately for the input and output directions.
        let mut used_input_regs = FxHashMap::default();
        let mut used_output_regs = FxHashMap::default();
        for (idx, op) in operands.iter().enumerate() {
            let op_sp = asm.operands[idx].1;
            if let Some(reg) = op.reg() {
                // Validate register classes against currently enabled target
                // features. We check that at least one type is available for
                // the current target.
                let reg_class = reg.reg_class();
                let mut required_features: Vec<&str> = vec![];
                for &(_, feature) in reg_class.supported_types(asm_arch) {
                    if let Some(feature) = feature {
                        if self.sess.target_features.contains(&Symbol::intern(feature)) {
                            required_features.clear();
                            break;
                        } else {
                            required_features.push(feature);
                        }
                    } else {
                        // A type usable without any feature: the class is fine.
                        required_features.clear();
                        break;
                    }
                }
                // We are sorting primitive strs here and can use unstable sort here
                required_features.sort_unstable();
                required_features.dedup();
                match &required_features[..] {
                    [] => {}
                    [feature] => {
                        let msg = format!(
                            "register class `{}` requires the `{}` target feature",
                            reg_class.name(),
                            feature
                        );
                        sess.struct_span_err(op_sp, &msg).emit();
                    }
                    features => {
                        let msg = format!(
                            "register class `{}` requires at least one target feature: {}",
                            reg_class.name(),
                            features.join(", ")
                        );
                        sess.struct_span_err(op_sp, &msg).emit();
                    }
                }

                // Check for conflicts between explicit register operands.
                if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
                    let (input, output) = match op {
                        hir::InlineAsmOperand::In { .. } => (true, false),
                        // Late output do not conflict with inputs, but normal outputs do
                        hir::InlineAsmOperand::Out { late, .. } => (!late, true),
                        hir::InlineAsmOperand::InOut { .. }
                        | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),
                        hir::InlineAsmOperand::Const { .. } | hir::InlineAsmOperand::Sym { .. } => {
                            unreachable!()
                        }
                    };

                    // Flag to output the error only once per operand
                    let mut skip = false;
                    reg.overlapping_regs(|r| {
                        let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
                                         input| {
                            match used_regs.entry(r) {
                                Entry::Occupied(o) => {
                                    if !skip {
                                        skip = true;

                                        // Look up the operand that previously
                                        // claimed this (overlapping) register.
                                        let idx2 = *o.get();
                                        let op2 = &operands[idx2];
                                        let op_sp2 = asm.operands[idx2].1;
                                        let reg2 = match op2.reg() {
                                            Some(asm::InlineAsmRegOrRegClass::Reg(r)) => r,
                                            _ => unreachable!(),
                                        };

                                        let msg = format!(
                                            "register `{}` conflicts with register `{}`",
                                            reg.name(),
                                            reg2.name()
                                        );
                                        let mut err = sess.struct_span_err(op_sp, &msg);
                                        err.span_label(
                                            op_sp,
                                            &format!("register `{}`", reg.name()),
                                        );
                                        err.span_label(
                                            op_sp2,
                                            &format!("register `{}`", reg2.name()),
                                        );

                                        match (op, op2) {
                                            (
                                                hir::InlineAsmOperand::In { .. },
                                                hir::InlineAsmOperand::Out { late, .. },
                                            )
                                            | (
                                                hir::InlineAsmOperand::Out { late, .. },
                                                hir::InlineAsmOperand::In { .. },
                                            ) => {
                                                assert!(!*late);
                                                let out_op_sp = if input { op_sp2 } else { op_sp };
                                                let msg = "use `lateout` instead of \
                                                           `out` to avoid conflict";
                                                err.span_help(out_op_sp, msg);
                                            }
                                            _ => {}
                                        }

                                        err.emit();
                                    }
                                }
                                Entry::Vacant(v) => {
                                    v.insert(idx);
                                }
                            }
                        };
                        if input {
                            check(&mut used_input_regs, true);
                        }
                        if output {
                            check(&mut used_output_regs, false);
                        }
                    });
                }
            }
        }

        let operands = self.arena.alloc_from_iter(operands);
        let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
        let line_spans = self.arena.alloc_slice(&asm.line_spans[..]);
        let hir_asm = hir::InlineAsm { template, operands, options: asm.options, line_spans };
        hir::ExprKind::InlineAsm(self.arena.alloc(hir_asm))
    }
1250
    /// Lowers a legacy `llvm_asm!` expression to HIR.
    ///
    /// The constraint/option metadata is copied verbatim into
    /// `hir::LlvmInlineAsmInner`; only the input and output *expressions*
    /// actually get lowered.
    fn lower_expr_llvm_asm(&mut self, asm: &LlvmInlineAsm) -> hir::ExprKind<'hir> {
        let inner = hir::LlvmInlineAsmInner {
            // Keep only the constraint strings; the expressions are lowered
            // separately below into `inputs_exprs`.
            inputs: asm.inputs.iter().map(|&(c, _)| c).collect(),
            outputs: asm
                .outputs
                .iter()
                .map(|out| hir::LlvmInlineAsmOutput {
                    constraint: out.constraint,
                    is_rw: out.is_rw,
                    is_indirect: out.is_indirect,
                    span: out.expr.span,
                })
                .collect(),
            asm: asm.asm,
            asm_str_style: asm.asm_str_style,
            clobbers: asm.clobbers.clone(),
            volatile: asm.volatile,
            alignstack: asm.alignstack,
            dialect: asm.dialect,
        };
        let hir_asm = hir::LlvmInlineAsm {
            inner,
            inputs_exprs: self.arena.alloc_from_iter(
                asm.inputs.iter().map(|&(_, ref input)| self.lower_expr_mut(input)),
            ),
            outputs_exprs: self
                .arena
                .alloc_from_iter(asm.outputs.iter().map(|out| self.lower_expr_mut(&out.expr))),
        };
        hir::ExprKind::LlvmInlineAsm(self.arena.alloc(hir_asm))
    }
1282
1283 fn lower_field(&mut self, f: &Field) -> hir::Field<'hir> {
1284 hir::Field {
1285 hir_id: self.next_id(),
1286 ident: f.ident,
1287 expr: self.lower_expr(&f.expr),
1288 span: f.span,
1289 is_shorthand: f.is_shorthand,
1290 }
1291 }
1292
1293 fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
1294 match self.generator_kind {
1295 Some(hir::GeneratorKind::Gen) => {}
1296 Some(hir::GeneratorKind::Async(_)) => {
1297 struct_span_err!(
1298 self.sess,
1299 span,
1300 E0727,
1301 "`async` generators are not yet supported"
1302 )
1303 .emit();
1304 }
1305 None => self.generator_kind = Some(hir::GeneratorKind::Gen),
1306 }
1307
1308 let expr =
1309 opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span));
1310
1311 hir::ExprKind::Yield(expr, hir::YieldSource::Yield)
1312 }
1313
    /// Desugar `ExprForLoop` from: `[opt_ident]: for <pat> in <head> <body>` into:
    /// ```rust
    /// {
    ///     let result = match ::std::iter::IntoIterator::into_iter(<head>) {
    ///         mut iter => {
    ///             [opt_ident]: loop {
    ///                 let mut __next;
    ///                 match ::std::iter::Iterator::next(&mut iter) {
    ///                     ::std::option::Option::Some(val) => __next = val,
    ///                     ::std::option::Option::None => break
    ///                 };
    ///                 let <pat> = __next;
    ///                 StmtKind::Expr(<body>);
    ///             }
    ///         }
    ///     };
    ///     result
    /// }
    /// ```
    fn lower_expr_for(
        &mut self,
        e: &Expr,
        pat: &Pat,
        head: &Expr,
        body: &Block,
        opt_label: Option<Label>,
    ) -> hir::Expr<'hir> {
        let orig_head_span = head.span;
        // expand <head>
        let mut head = self.lower_expr_mut(head);
        // Mark the generated code as a for-loop desugaring so diagnostics can
        // point back at the original `for` head.
        let desugared_span = self.mark_span_with_reason(
            DesugaringKind::ForLoop(ForLoopLoc::Head),
            orig_head_span,
            None,
        );
        head.span = desugared_span;

        let iter = Ident::with_dummy_span(sym::iter);

        let next_ident = Ident::with_dummy_span(sym::__next);
        let (next_pat, next_pat_hid) = self.pat_ident_binding_mode(
            desugared_span,
            next_ident,
            hir::BindingAnnotation::Mutable,
        );

        // `::std::option::Option::Some(val) => __next = val`
        let pat_arm = {
            let val_ident = Ident::with_dummy_span(sym::val);
            let (val_pat, val_pat_hid) = self.pat_ident(pat.span, val_ident);
            let val_expr = self.expr_ident(pat.span, val_ident, val_pat_hid);
            let next_expr = self.expr_ident(pat.span, next_ident, next_pat_hid);
            let assign = self.arena.alloc(self.expr(
                pat.span,
                hir::ExprKind::Assign(next_expr, val_expr, pat.span),
                ThinVec::new(),
            ));
            let some_pat = self.pat_some(pat.span, val_pat);
            self.arm(some_pat, assign)
        };

        // `::std::option::Option::None => break`
        let break_arm = {
            let break_expr =
                self.with_loop_scope(e.id, |this| this.expr_break(e.span, ThinVec::new()));
            let pat = self.pat_none(e.span);
            self.arm(pat, break_expr)
        };

        // `mut iter`
        let (iter_pat, iter_pat_nid) =
            self.pat_ident_binding_mode(desugared_span, iter, hir::BindingAnnotation::Mutable);

        // `match ::std::iter::Iterator::next(&mut iter) { ... }`
        let match_expr = {
            let iter = self.expr_ident(desugared_span, iter, iter_pat_nid);
            let ref_mut_iter = self.expr_mut_addr_of(desugared_span, iter);
            let next_expr = self.expr_call_lang_item_fn(
                desugared_span,
                hir::LangItem::IteratorNext,
                arena_vec![self; ref_mut_iter],
            );
            let arms = arena_vec![self; pat_arm, break_arm];

            self.expr_match(desugared_span, next_expr, arms, hir::MatchSource::ForLoopDesugar)
        };
        let match_stmt = self.stmt_expr(desugared_span, match_expr);

        let next_expr = self.expr_ident(desugared_span, next_ident, next_pat_hid);

        // `let mut __next`
        let next_let = self.stmt_let_pat(
            ThinVec::new(),
            desugared_span,
            None,
            next_pat,
            hir::LocalSource::ForLoopDesugar,
        );

        // `let <pat> = __next`
        let pat = self.lower_pat(pat);
        let pat_let = self.stmt_let_pat(
            ThinVec::new(),
            desugared_span,
            Some(next_expr),
            pat,
            hir::LocalSource::ForLoopDesugar,
        );

        // The loop body runs in a fresh loop scope so `break`/`continue`
        // inside it resolve to this loop.
        let body_block = self.with_loop_scope(e.id, |this| this.lower_block(body, false));
        let body_expr = self.expr_block(body_block, ThinVec::new());
        let body_stmt = self.stmt_expr(body.span, body_expr);

        let loop_block = self.block_all(
            e.span,
            arena_vec![self; next_let, match_stmt, pat_let, body_stmt],
            None,
        );

        // `[opt_ident]: loop { ... }`
        let kind = hir::ExprKind::Loop(loop_block, opt_label, hir::LoopSource::ForLoop);
        let loop_expr = self.arena.alloc(hir::Expr {
            // The loop expression reuses the `for` expression's id so labels
            // resolved against the AST node still point at it.
            hir_id: self.lower_node_id(e.id),
            kind,
            span: e.span,
            attrs: ThinVec::new(),
        });

        // `mut iter => { ... }`
        let iter_arm = self.arm(iter_pat, loop_expr);

        let into_iter_span = self.mark_span_with_reason(
            DesugaringKind::ForLoop(ForLoopLoc::IntoIter),
            orig_head_span,
            None,
        );

        // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
        let into_iter_expr = {
            self.expr_call_lang_item_fn(
                into_iter_span,
                hir::LangItem::IntoIterIntoIter,
                arena_vec![self; head],
            )
        };

        let match_expr = self.arena.alloc(self.expr_match(
            desugared_span,
            into_iter_expr,
            arena_vec![self; iter_arm],
            hir::MatchSource::ForLoopDesugar,
        ));

        // This is effectively `{ let _result = ...; _result }`.
        // The construct was introduced in #21984 and is necessary to make sure that
        // temporaries in the `head` expression are dropped and do not leak to the
        // surrounding scope of the `match` since the `match` is not a terminating scope.
        //
        // Also, add the attributes to the outer returned expr node.
        self.expr_drop_temps_mut(desugared_span, match_expr, e.attrs.clone())
    }
1475
    /// Desugar `ExprKind::Try` from: `<expr>?` into:
    /// ```rust
    /// match Try::into_result(<expr>) {
    ///     Ok(val) => #[allow(unreachable_code)] val,
    ///     Err(err) => #[allow(unreachable_code)]
    ///                 // If there is an enclosing `try {...}`:
    ///                 break 'catch_target Try::from_error(From::from(err)),
    ///                 // Otherwise:
    ///                 return Try::from_error(From::from(err)),
    /// }
    /// ```
    fn lower_expr_try(&mut self, span: Span, sub_expr: &Expr) -> hir::ExprKind<'hir> {
        // Spans marked as question-mark desugaring (and allowing the
        // unstable `try_trait` feature) so diagnostics/stability checks
        // treat the generated code specially.
        let unstable_span = self.mark_span_with_reason(
            DesugaringKind::QuestionMark,
            span,
            self.allow_try_trait.clone(),
        );
        // `try_span` covers just the trailing `?` token.
        let try_span = self.sess.source_map().end_point(span);
        let try_span = self.mark_span_with_reason(
            DesugaringKind::QuestionMark,
            try_span,
            self.allow_try_trait.clone(),
        );

        // `Try::into_result(<expr>)`
        let scrutinee = {
            // expand <expr>
            let sub_expr = self.lower_expr_mut(sub_expr);

            self.expr_call_lang_item_fn(
                unstable_span,
                hir::LangItem::TryIntoResult,
                arena_vec![self; sub_expr],
            )
        };

        // `#[allow(unreachable_code)]`
        let attr = {
            // `allow(unreachable_code)`
            let allow = {
                let allow_ident = Ident::new(sym::allow, span);
                let uc_ident = Ident::new(sym::unreachable_code, span);
                let uc_nested = attr::mk_nested_word_item(uc_ident);
                attr::mk_list_item(allow_ident, vec![uc_nested])
            };
            attr::mk_attr_outer(allow)
        };
        let attrs = vec![attr];

        // `Ok(val) => #[allow(unreachable_code)] val,`
        let ok_arm = {
            let val_ident = Ident::with_dummy_span(sym::val);
            let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
            let val_expr = self.arena.alloc(self.expr_ident_with_attrs(
                span,
                val_ident,
                val_pat_nid,
                ThinVec::from(attrs.clone()),
            ));
            let ok_pat = self.pat_ok(span, val_pat);
            self.arm(ok_pat, val_expr)
        };

        // `Err(err) => #[allow(unreachable_code)]
        //              return Try::from_error(From::from(err)),`
        let err_arm = {
            let err_ident = Ident::with_dummy_span(sym::err);
            let (err_local, err_local_nid) = self.pat_ident(try_span, err_ident);
            let from_expr = {
                let err_expr = self.expr_ident_mut(try_span, err_ident, err_local_nid);
                self.expr_call_lang_item_fn(
                    try_span,
                    hir::LangItem::FromFrom,
                    arena_vec![self; err_expr],
                )
            };
            let from_err_expr = self.wrap_in_try_constructor(
                hir::LangItem::TryFromError,
                unstable_span,
                from_expr,
                try_span,
            );
            let thin_attrs = ThinVec::from(attrs);
            // Inside a `try {}` block the error breaks to the block;
            // otherwise it returns from the enclosing function.
            let catch_scope = self.catch_scopes.last().copied();
            let ret_expr = if let Some(catch_node) = catch_scope {
                let target_id = Ok(self.lower_node_id(catch_node));
                self.arena.alloc(self.expr(
                    try_span,
                    hir::ExprKind::Break(
                        hir::Destination { label: None, target_id },
                        Some(from_err_expr),
                    ),
                    thin_attrs,
                ))
            } else {
                self.arena.alloc(self.expr(
                    try_span,
                    hir::ExprKind::Ret(Some(from_err_expr)),
                    thin_attrs,
                ))
            };

            let err_pat = self.pat_err(try_span, err_local);
            self.arm(err_pat, ret_expr)
        };

        hir::ExprKind::Match(
            scrutinee,
            arena_vec![self; err_arm, ok_arm],
            hir::MatchSource::TryDesugar,
        )
    }
1588
1589 // =========================================================================
1590 // Helper methods for building HIR.
1591 // =========================================================================
1592
1593 /// Constructs a `true` or `false` literal expression.
1594 pub(super) fn expr_bool(&mut self, span: Span, val: bool) -> &'hir hir::Expr<'hir> {
1595 let lit = Spanned { span, node: LitKind::Bool(val) };
1596 self.arena.alloc(self.expr(span, hir::ExprKind::Lit(lit), ThinVec::new()))
1597 }
1598
1599 /// Wrap the given `expr` in a terminating scope using `hir::ExprKind::DropTemps`.
1600 ///
1601 /// In terms of drop order, it has the same effect as wrapping `expr` in
1602 /// `{ let _t = $expr; _t }` but should provide better compile-time performance.
1603 ///
1604 /// The drop order can be important in e.g. `if expr { .. }`.
1605 pub(super) fn expr_drop_temps(
1606 &mut self,
1607 span: Span,
1608 expr: &'hir hir::Expr<'hir>,
1609 attrs: AttrVec,
1610 ) -> &'hir hir::Expr<'hir> {
1611 self.arena.alloc(self.expr_drop_temps_mut(span, expr, attrs))
1612 }
1613
1614 pub(super) fn expr_drop_temps_mut(
1615 &mut self,
1616 span: Span,
1617 expr: &'hir hir::Expr<'hir>,
1618 attrs: AttrVec,
1619 ) -> hir::Expr<'hir> {
1620 self.expr(span, hir::ExprKind::DropTemps(expr), attrs)
1621 }
1622
1623 fn expr_match(
1624 &mut self,
1625 span: Span,
1626 arg: &'hir hir::Expr<'hir>,
1627 arms: &'hir [hir::Arm<'hir>],
1628 source: hir::MatchSource,
1629 ) -> hir::Expr<'hir> {
1630 self.expr(span, hir::ExprKind::Match(arg, arms, source), ThinVec::new())
1631 }
1632
1633 fn expr_break(&mut self, span: Span, attrs: AttrVec) -> &'hir hir::Expr<'hir> {
1634 let expr_break = hir::ExprKind::Break(self.lower_loop_destination(None), None);
1635 self.arena.alloc(self.expr(span, expr_break, attrs))
1636 }
1637
1638 fn expr_mut_addr_of(&mut self, span: Span, e: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1639 self.expr(
1640 span,
1641 hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e),
1642 ThinVec::new(),
1643 )
1644 }
1645
1646 fn expr_unit(&mut self, sp: Span) -> &'hir hir::Expr<'hir> {
1647 self.arena.alloc(self.expr(sp, hir::ExprKind::Tup(&[]), ThinVec::new()))
1648 }
1649
1650 fn expr_call_mut(
1651 &mut self,
1652 span: Span,
1653 e: &'hir hir::Expr<'hir>,
1654 args: &'hir [hir::Expr<'hir>],
1655 ) -> hir::Expr<'hir> {
1656 self.expr(span, hir::ExprKind::Call(e, args), ThinVec::new())
1657 }
1658
1659 fn expr_call(
1660 &mut self,
1661 span: Span,
1662 e: &'hir hir::Expr<'hir>,
1663 args: &'hir [hir::Expr<'hir>],
1664 ) -> &'hir hir::Expr<'hir> {
1665 self.arena.alloc(self.expr_call_mut(span, e, args))
1666 }
1667
1668 fn expr_call_lang_item_fn_mut(
1669 &mut self,
1670 span: Span,
1671 lang_item: hir::LangItem,
1672 args: &'hir [hir::Expr<'hir>],
1673 ) -> hir::Expr<'hir> {
1674 let path = self.arena.alloc(self.expr_lang_item_path(span, lang_item, ThinVec::new()));
1675 self.expr_call_mut(span, path, args)
1676 }
1677
1678 fn expr_call_lang_item_fn(
1679 &mut self,
1680 span: Span,
1681 lang_item: hir::LangItem,
1682 args: &'hir [hir::Expr<'hir>],
1683 ) -> &'hir hir::Expr<'hir> {
1684 self.arena.alloc(self.expr_call_lang_item_fn_mut(span, lang_item, args))
1685 }
1686
1687 fn expr_lang_item_path(
1688 &mut self,
1689 span: Span,
1690 lang_item: hir::LangItem,
1691 attrs: AttrVec,
1692 ) -> hir::Expr<'hir> {
1693 self.expr(span, hir::ExprKind::Path(hir::QPath::LangItem(lang_item, span)), attrs)
1694 }
1695
1696 pub(super) fn expr_ident(
1697 &mut self,
1698 sp: Span,
1699 ident: Ident,
1700 binding: hir::HirId,
1701 ) -> &'hir hir::Expr<'hir> {
1702 self.arena.alloc(self.expr_ident_mut(sp, ident, binding))
1703 }
1704
1705 pub(super) fn expr_ident_mut(
1706 &mut self,
1707 sp: Span,
1708 ident: Ident,
1709 binding: hir::HirId,
1710 ) -> hir::Expr<'hir> {
1711 self.expr_ident_with_attrs(sp, ident, binding, ThinVec::new())
1712 }
1713
1714 fn expr_ident_with_attrs(
1715 &mut self,
1716 span: Span,
1717 ident: Ident,
1718 binding: hir::HirId,
1719 attrs: AttrVec,
1720 ) -> hir::Expr<'hir> {
1721 let expr_path = hir::ExprKind::Path(hir::QPath::Resolved(
1722 None,
1723 self.arena.alloc(hir::Path {
1724 span,
1725 res: Res::Local(binding),
1726 segments: arena_vec![self; hir::PathSegment::from_ident(ident)],
1727 }),
1728 ));
1729
1730 self.expr(span, expr_path, attrs)
1731 }
1732
1733 fn expr_unsafe(&mut self, expr: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1734 let hir_id = self.next_id();
1735 let span = expr.span;
1736 self.expr(
1737 span,
1738 hir::ExprKind::Block(
1739 self.arena.alloc(hir::Block {
1740 stmts: &[],
1741 expr: Some(expr),
1742 hir_id,
1743 rules: hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated),
1744 span,
1745 targeted_by_break: false,
1746 }),
1747 None,
1748 ),
1749 ThinVec::new(),
1750 )
1751 }
1752
1753 fn expr_block_empty(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
1754 let blk = self.block_all(span, &[], None);
1755 let expr = self.expr_block(blk, ThinVec::new());
1756 self.arena.alloc(expr)
1757 }
1758
1759 pub(super) fn expr_block(
1760 &mut self,
1761 b: &'hir hir::Block<'hir>,
1762 attrs: AttrVec,
1763 ) -> hir::Expr<'hir> {
1764 self.expr(b.span, hir::ExprKind::Block(b, None), attrs)
1765 }
1766
1767 pub(super) fn expr(
1768 &mut self,
1769 span: Span,
1770 kind: hir::ExprKind<'hir>,
1771 attrs: AttrVec,
1772 ) -> hir::Expr<'hir> {
1773 hir::Expr { hir_id: self.next_id(), kind, span, attrs }
1774 }
1775
1776 fn field(&mut self, ident: Ident, expr: &'hir hir::Expr<'hir>, span: Span) -> hir::Field<'hir> {
1777 hir::Field { hir_id: self.next_id(), ident, span, expr, is_shorthand: false }
1778 }
1779
1780 fn arm(&mut self, pat: &'hir hir::Pat<'hir>, expr: &'hir hir::Expr<'hir>) -> hir::Arm<'hir> {
1781 hir::Arm {
1782 hir_id: self.next_id(),
1783 attrs: &[],
1784 pat,
1785 guard: None,
1786 span: expr.span,
1787 body: expr,
1788 }
1789 }
1790 }