// compiler/rustc_ast_lowering/src/expr.rs (rustc 1.60.0)
1 use crate::{FnDeclKind, ImplTraitPosition};
2
3 use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
4
5 use rustc_ast::attr;
6 use rustc_ast::ptr::P as AstP;
7 use rustc_ast::*;
8 use rustc_data_structures::stack::ensure_sufficient_stack;
9 use rustc_data_structures::thin_vec::ThinVec;
10 use rustc_errors::struct_span_err;
11 use rustc_hir as hir;
12 use rustc_hir::def::Res;
13 use rustc_hir::definitions::DefPathData;
14 use rustc_span::hygiene::ExpnId;
15 use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
16 use rustc_span::symbol::{sym, Ident};
17 use rustc_span::DUMMY_SP;
18
19 impl<'hir> LoweringContext<'_, 'hir> {
20 fn lower_exprs(&mut self, exprs: &[AstP<Expr>]) -> &'hir [hir::Expr<'hir>] {
21 self.arena.alloc_from_iter(exprs.iter().map(|x| self.lower_expr_mut(x)))
22 }
23
24 pub(super) fn lower_expr(&mut self, e: &Expr) -> &'hir hir::Expr<'hir> {
25 self.arena.alloc(self.lower_expr_mut(e))
26 }
27
28 pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
29 ensure_sufficient_stack(|| {
30 let kind = match e.kind {
31 ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
32 ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
33 ExprKind::ConstBlock(ref anon_const) => {
34 let anon_const = self.lower_anon_const(anon_const);
35 hir::ExprKind::ConstBlock(anon_const)
36 }
37 ExprKind::Repeat(ref expr, ref count) => {
38 let expr = self.lower_expr(expr);
39 let count = self.lower_array_length(count);
40 hir::ExprKind::Repeat(expr, count)
41 }
42 ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
43 ExprKind::Call(ref f, ref args) => {
44 if let Some(legacy_args) = self.resolver.legacy_const_generic_args(f) {
45 self.lower_legacy_const_generics((**f).clone(), args.clone(), &legacy_args)
46 } else {
47 let f = self.lower_expr(f);
48 hir::ExprKind::Call(f, self.lower_exprs(args))
49 }
50 }
51 ExprKind::MethodCall(ref seg, ref args, span) => {
52 let hir_seg = self.arena.alloc(self.lower_path_segment(
53 e.span,
54 seg,
55 ParamMode::Optional,
56 0,
57 ParenthesizedGenericArgs::Err,
58 ImplTraitContext::Disallowed(ImplTraitPosition::Path),
59 ));
60 let args = self.lower_exprs(args);
61 hir::ExprKind::MethodCall(hir_seg, args, self.lower_span(span))
62 }
63 ExprKind::Binary(binop, ref lhs, ref rhs) => {
64 let binop = self.lower_binop(binop);
65 let lhs = self.lower_expr(lhs);
66 let rhs = self.lower_expr(rhs);
67 hir::ExprKind::Binary(binop, lhs, rhs)
68 }
69 ExprKind::Unary(op, ref ohs) => {
70 let op = self.lower_unop(op);
71 let ohs = self.lower_expr(ohs);
72 hir::ExprKind::Unary(op, ohs)
73 }
74 ExprKind::Lit(ref l) => {
75 hir::ExprKind::Lit(respan(self.lower_span(l.span), l.kind.clone()))
76 }
77 ExprKind::Cast(ref expr, ref ty) => {
78 let expr = self.lower_expr(expr);
79 let ty =
80 self.lower_ty(ty, ImplTraitContext::Disallowed(ImplTraitPosition::Type));
81 hir::ExprKind::Cast(expr, ty)
82 }
83 ExprKind::Type(ref expr, ref ty) => {
84 let expr = self.lower_expr(expr);
85 let ty =
86 self.lower_ty(ty, ImplTraitContext::Disallowed(ImplTraitPosition::Type));
87 hir::ExprKind::Type(expr, ty)
88 }
89 ExprKind::AddrOf(k, m, ref ohs) => {
90 let ohs = self.lower_expr(ohs);
91 hir::ExprKind::AddrOf(k, m, ohs)
92 }
93 ExprKind::Let(ref pat, ref scrutinee, span) => {
94 hir::ExprKind::Let(self.arena.alloc(hir::Let {
95 hir_id: self.next_id(),
96 span: self.lower_span(span),
97 pat: self.lower_pat(pat),
98 ty: None,
99 init: self.lower_expr(scrutinee),
100 }))
101 }
102 ExprKind::If(ref cond, ref then, ref else_opt) => {
103 self.lower_expr_if(cond, then, else_opt.as_deref())
104 }
105 ExprKind::While(ref cond, ref body, opt_label) => {
106 self.with_loop_scope(e.id, |this| {
107 let span =
108 this.mark_span_with_reason(DesugaringKind::WhileLoop, e.span, None);
109 this.lower_expr_while_in_loop_scope(span, cond, body, opt_label)
110 })
111 }
112 ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
113 hir::ExprKind::Loop(
114 this.lower_block(body, false),
115 this.lower_label(opt_label),
116 hir::LoopSource::Loop,
117 DUMMY_SP,
118 )
119 }),
120 ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body),
121 ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match(
122 self.lower_expr(expr),
123 self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
124 hir::MatchSource::Normal,
125 ),
126 ExprKind::Async(capture_clause, closure_node_id, ref block) => self
127 .make_async_expr(
128 capture_clause,
129 closure_node_id,
130 None,
131 block.span,
132 hir::AsyncGeneratorKind::Block,
133 |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
134 ),
135 ExprKind::Await(ref expr) => {
136 let span = if expr.span.hi() < e.span.hi() {
137 expr.span.shrink_to_hi().with_hi(e.span.hi())
138 } else {
139 // this is a recovered `await expr`
140 e.span
141 };
142 self.lower_expr_await(span, expr)
143 }
144 ExprKind::Closure(
145 capture_clause,
146 asyncness,
147 movability,
148 ref decl,
149 ref body,
150 fn_decl_span,
151 ) => {
152 if let Async::Yes { closure_id, .. } = asyncness {
153 self.lower_expr_async_closure(
154 capture_clause,
155 closure_id,
156 decl,
157 body,
158 fn_decl_span,
159 )
160 } else {
161 self.lower_expr_closure(
162 capture_clause,
163 movability,
164 decl,
165 body,
166 fn_decl_span,
167 )
168 }
169 }
170 ExprKind::Block(ref blk, opt_label) => {
171 let opt_label = self.lower_label(opt_label);
172 hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
173 }
174 ExprKind::Assign(ref el, ref er, span) => {
175 self.lower_expr_assign(el, er, span, e.span)
176 }
177 ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp(
178 self.lower_binop(op),
179 self.lower_expr(el),
180 self.lower_expr(er),
181 ),
182 ExprKind::Field(ref el, ident) => {
183 hir::ExprKind::Field(self.lower_expr(el), self.lower_ident(ident))
184 }
185 ExprKind::Index(ref el, ref er) => {
186 hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
187 }
188 ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
189 self.lower_expr_range_closed(e.span, e1, e2)
190 }
191 ExprKind::Range(ref e1, ref e2, lims) => {
192 self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
193 }
194 ExprKind::Underscore => {
195 self.sess
196 .struct_span_err(
197 e.span,
198 "in expressions, `_` can only be used on the left-hand side of an assignment",
199 )
200 .span_label(e.span, "`_` not allowed here")
201 .emit();
202 hir::ExprKind::Err
203 }
204 ExprKind::Path(ref qself, ref path) => {
205 let qpath = self.lower_qpath(
206 e.id,
207 qself,
208 path,
209 ParamMode::Optional,
210 ImplTraitContext::Disallowed(ImplTraitPosition::Path),
211 );
212 hir::ExprKind::Path(qpath)
213 }
214 ExprKind::Break(opt_label, ref opt_expr) => {
215 let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
216 hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr)
217 }
218 ExprKind::Continue(opt_label) => {
219 hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label))
220 }
221 ExprKind::Ret(ref e) => {
222 let e = e.as_ref().map(|x| self.lower_expr(x));
223 hir::ExprKind::Ret(e)
224 }
225 ExprKind::InlineAsm(ref asm) => {
226 hir::ExprKind::InlineAsm(self.lower_inline_asm(e.span, asm))
227 }
228 ExprKind::Struct(ref se) => {
229 let rest = match &se.rest {
230 StructRest::Base(e) => Some(self.lower_expr(e)),
231 StructRest::Rest(sp) => {
232 self.sess
233 .struct_span_err(*sp, "base expression required after `..`")
234 .span_label(*sp, "add a base expression here")
235 .emit();
236 Some(&*self.arena.alloc(self.expr_err(*sp)))
237 }
238 StructRest::None => None,
239 };
240 hir::ExprKind::Struct(
241 self.arena.alloc(self.lower_qpath(
242 e.id,
243 &se.qself,
244 &se.path,
245 ParamMode::Optional,
246 ImplTraitContext::Disallowed(ImplTraitPosition::Path),
247 )),
248 self.arena
249 .alloc_from_iter(se.fields.iter().map(|x| self.lower_expr_field(x))),
250 rest,
251 )
252 }
253 ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
254 ExprKind::Err => hir::ExprKind::Err,
255 ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr),
256 ExprKind::Paren(ref ex) => {
257 let mut ex = self.lower_expr_mut(ex);
258 // Include parens in span, but only if it is a super-span.
259 if e.span.contains(ex.span) {
260 ex.span = self.lower_span(e.span);
261 }
262 // Merge attributes into the inner expression.
263 if !e.attrs.is_empty() {
264 let old_attrs =
265 self.attrs.get(&ex.hir_id.local_id).map(|la| *la).unwrap_or(&[]);
266 self.attrs.insert(
267 ex.hir_id.local_id,
268 &*self.arena.alloc_from_iter(
269 e.attrs
270 .iter()
271 .map(|a| self.lower_attr(a))
272 .chain(old_attrs.iter().cloned()),
273 ),
274 );
275 }
276 return ex;
277 }
278
279 // Desugar `ExprForLoop`
280 // from: `[opt_ident]: for <pat> in <head> <body>`
281 ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
282 return self.lower_expr_for(e, pat, head, body, opt_label);
283 }
284 ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
285 };
286
287 let hir_id = self.lower_node_id(e.id);
288 self.lower_attrs(hir_id, &e.attrs);
289 hir::Expr { hir_id, kind, span: self.lower_span(e.span) }
290 })
291 }
292
293 fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
294 match u {
295 UnOp::Deref => hir::UnOp::Deref,
296 UnOp::Not => hir::UnOp::Not,
297 UnOp::Neg => hir::UnOp::Neg,
298 }
299 }
300
301 fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
302 Spanned {
303 node: match b.node {
304 BinOpKind::Add => hir::BinOpKind::Add,
305 BinOpKind::Sub => hir::BinOpKind::Sub,
306 BinOpKind::Mul => hir::BinOpKind::Mul,
307 BinOpKind::Div => hir::BinOpKind::Div,
308 BinOpKind::Rem => hir::BinOpKind::Rem,
309 BinOpKind::And => hir::BinOpKind::And,
310 BinOpKind::Or => hir::BinOpKind::Or,
311 BinOpKind::BitXor => hir::BinOpKind::BitXor,
312 BinOpKind::BitAnd => hir::BinOpKind::BitAnd,
313 BinOpKind::BitOr => hir::BinOpKind::BitOr,
314 BinOpKind::Shl => hir::BinOpKind::Shl,
315 BinOpKind::Shr => hir::BinOpKind::Shr,
316 BinOpKind::Eq => hir::BinOpKind::Eq,
317 BinOpKind::Lt => hir::BinOpKind::Lt,
318 BinOpKind::Le => hir::BinOpKind::Le,
319 BinOpKind::Ne => hir::BinOpKind::Ne,
320 BinOpKind::Ge => hir::BinOpKind::Ge,
321 BinOpKind::Gt => hir::BinOpKind::Gt,
322 },
323 span: self.lower_span(b.span),
324 }
325 }
326
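/// Lower a call to a function that uses `#[rustc_legacy_const_generics]`: the arguments at
/// the indices in `legacy_args_idx` are written in value position but are really const
/// generic arguments, so they are split off, wrapped in anonymous constants, and attached
/// to the last path segment as generic args before lowering the call as usual.
///
/// A user-level sketch (illustrative only; the concrete intrinsic and the `demo` wrapper
/// are assumptions, not taken from this file):
/// ```rust
/// #[cfg(target_arch = "x86_64")]
/// fn demo(a: std::arch::x86_64::__m128i) -> std::arch::x86_64::__m128i {
///     // `_mm_shuffle_epi32` takes its immediate via `#[rustc_legacy_const_generics(1)]`,
///     // so the second "argument" below is lowered as an anonymous const generic,
///     // roughly `_mm_shuffle_epi32::<0b00_01_10_11>(a)`.
///     unsafe { std::arch::x86_64::_mm_shuffle_epi32(a, 0b00_01_10_11) }
/// }
/// ```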
327 fn lower_legacy_const_generics(
328 &mut self,
329 mut f: Expr,
330 args: Vec<AstP<Expr>>,
331 legacy_args_idx: &[usize],
332 ) -> hir::ExprKind<'hir> {
333 let path = match f.kind {
334 ExprKind::Path(None, ref mut path) => path,
335 _ => unreachable!(),
336 };
337
338 // Split the arguments into const generics and normal arguments
339 let mut real_args = vec![];
340 let mut generic_args = vec![];
341 for (idx, arg) in args.into_iter().enumerate() {
342 if legacy_args_idx.contains(&idx) {
343 let parent_def_id = self.current_hir_id_owner;
344 let node_id = self.resolver.next_node_id();
345
346 // Add a definition for the in-band const def.
347 self.resolver.create_def(
348 parent_def_id,
349 node_id,
350 DefPathData::AnonConst,
351 ExpnId::root(),
352 arg.span,
353 );
354
355 let anon_const = AnonConst { id: node_id, value: arg };
356 generic_args.push(AngleBracketedArg::Arg(GenericArg::Const(anon_const)));
357 } else {
358 real_args.push(arg);
359 }
360 }
361
362 // Add generic args to the last element of the path.
363 let last_segment = path.segments.last_mut().unwrap();
364 assert!(last_segment.args.is_none());
365 last_segment.args = Some(AstP(GenericArgs::AngleBracketed(AngleBracketedArgs {
366 span: DUMMY_SP,
367 args: generic_args,
368 })));
369
370 // Now lower everything as normal.
371 let f = self.lower_expr(&f);
372 hir::ExprKind::Call(f, self.lower_exprs(&real_args))
373 }
374
375 fn lower_expr_if(
376 &mut self,
377 cond: &Expr,
378 then: &Block,
379 else_opt: Option<&Expr>,
380 ) -> hir::ExprKind<'hir> {
381 let lowered_cond = self.lower_expr(cond);
382 let new_cond = self.manage_let_cond(lowered_cond);
383 let then_expr = self.lower_block_expr(then);
384 if let Some(rslt) = else_opt {
385 hir::ExprKind::If(new_cond, self.arena.alloc(then_expr), Some(self.lower_expr(rslt)))
386 } else {
387 hir::ExprKind::If(new_cond, self.arena.alloc(then_expr), None)
388 }
389 }
390
391 // If `cond` contains a `let` expression, return it unchanged. Otherwise, wrap `cond`
392 // in `DropTemps`, which behaves like the temporary block `{ let _t = cond; _t }`.
393 fn manage_let_cond(&mut self, cond: &'hir hir::Expr<'hir>) -> &'hir hir::Expr<'hir> {
394 fn has_let_expr<'hir>(expr: &'hir hir::Expr<'hir>) -> bool {
395 match expr.kind {
396 hir::ExprKind::Binary(_, lhs, rhs) => has_let_expr(lhs) || has_let_expr(rhs),
397 hir::ExprKind::Let(..) => true,
398 _ => false,
399 }
400 }
401 if has_let_expr(cond) {
402 cond
403 } else {
404 let reason = DesugaringKind::CondTemporary;
405 let span_block = self.mark_span_with_reason(reason, cond.span, None);
406 self.expr_drop_temps(span_block, cond, AttrVec::new())
407 }
408 }
409
410 // We desugar: `'label: while $cond $body` into:
411 //
412 // ```
413 // 'label: loop {
414 // if { let _t = $cond; _t } {
415 // $body
416 // }
417 // else {
418 // break;
419 // }
420 // }
421 // ```
422 //
423 // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
424 // to preserve drop semantics since `while $cond { ... }` does not
425 // let temporaries live outside of `cond`.
426 fn lower_expr_while_in_loop_scope(
427 &mut self,
428 span: Span,
429 cond: &Expr,
430 body: &Block,
431 opt_label: Option<Label>,
432 ) -> hir::ExprKind<'hir> {
433 let lowered_cond = self.with_loop_condition_scope(|t| t.lower_expr(cond));
434 let new_cond = self.manage_let_cond(lowered_cond);
435 let then = self.lower_block_expr(body);
436 let expr_break = self.expr_break(span, ThinVec::new());
437 let stmt_break = self.stmt_expr(span, expr_break);
438 let else_blk = self.block_all(span, arena_vec![self; stmt_break], None);
439 let else_expr = self.arena.alloc(self.expr_block(else_blk, ThinVec::new()));
440 let if_kind = hir::ExprKind::If(new_cond, self.arena.alloc(then), Some(else_expr));
441 let if_expr = self.expr(span, if_kind, ThinVec::new());
442 let block = self.block_expr(self.arena.alloc(if_expr));
443 let span = self.lower_span(span.with_hi(cond.span.hi()));
444 let opt_label = self.lower_label(opt_label);
445 hir::ExprKind::Loop(block, opt_label, hir::LoopSource::While, span)
446 }
447
448 /// Desugar `try { <stmts>; <expr> }` into `{ <stmts>; ::std::ops::Try::from_output(<expr>) }`,
449 /// `try { <stmts>; }` into `{ <stmts>; ::std::ops::Try::from_output(()) }`
450 /// and save the block id to use it as a break target for desugaring of the `?` operator.
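///
/// A user-level sketch of the surface feature (nightly-only, behind `#![feature(try_blocks)]`;
/// the `lookup` helper is an illustrative assumption, not taken from this file):
/// ```rust
/// #![feature(try_blocks)]
///
/// fn lookup(map: &std::collections::HashMap<&str, i32>) -> Option<i32> {
///     // Inside the `try` block, `?` breaks to the end of the block instead of returning
///     // from `lookup`, and the final expression is wrapped via `Try::from_output`.
///     let sum: Option<i32> = try { map.get("a")? + map.get("b")? };
///     sum
/// }
/// ```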
451 fn lower_expr_try_block(&mut self, body: &Block) -> hir::ExprKind<'hir> {
452 self.with_catch_scope(body.id, |this| {
453 let mut block = this.lower_block_noalloc(body, true);
454
455 // Final expression of the block (if present) or `()` with span at the end of block
456 let (try_span, tail_expr) = if let Some(expr) = block.expr.take() {
457 (
458 this.mark_span_with_reason(
459 DesugaringKind::TryBlock,
460 expr.span,
461 this.allow_try_trait.clone(),
462 ),
463 expr,
464 )
465 } else {
466 let try_span = this.mark_span_with_reason(
467 DesugaringKind::TryBlock,
468 this.sess.source_map().end_point(body.span),
469 this.allow_try_trait.clone(),
470 );
471
472 (try_span, this.expr_unit(try_span))
473 };
474
475 let ok_wrapped_span =
476 this.mark_span_with_reason(DesugaringKind::TryBlock, tail_expr.span, None);
477
478 // `::std::ops::Try::from_output($tail_expr)`
479 block.expr = Some(this.wrap_in_try_constructor(
480 hir::LangItem::TryTraitFromOutput,
481 try_span,
482 tail_expr,
483 ok_wrapped_span,
484 ));
485
486 hir::ExprKind::Block(this.arena.alloc(block), None)
487 })
488 }
489
490 fn wrap_in_try_constructor(
491 &mut self,
492 lang_item: hir::LangItem,
493 method_span: Span,
494 expr: &'hir hir::Expr<'hir>,
495 overall_span: Span,
496 ) -> &'hir hir::Expr<'hir> {
497 let constructor = self.arena.alloc(self.expr_lang_item_path(
498 method_span,
499 lang_item,
500 ThinVec::new(),
501 None,
502 ));
503 self.expr_call(overall_span, constructor, std::slice::from_ref(expr))
504 }
505
506 fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
507 let pat = self.lower_pat(&arm.pat);
508 let guard = arm.guard.as_ref().map(|cond| {
509 if let ExprKind::Let(ref pat, ref scrutinee, _) = cond.kind {
510 hir::Guard::IfLet(self.lower_pat(pat), self.lower_expr(scrutinee))
511 } else {
512 hir::Guard::If(self.lower_expr(cond))
513 }
514 });
515 let hir_id = self.next_id();
516 self.lower_attrs(hir_id, &arm.attrs);
517 hir::Arm {
518 hir_id,
519 pat,
520 guard,
521 body: self.lower_expr(&arm.body),
522 span: self.lower_span(arm.span),
523 }
524 }
525
526 /// Lower an `async` construct to a generator that is then wrapped so it implements `Future`.
527 ///
528 /// This results in:
529 ///
530 /// ```text
531 /// std::future::from_generator(static move? |_task_context| -> <ret_ty> {
532 /// <body>
533 /// })
534 /// ```
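///
/// For illustration (a sketch of assumed surface usage, not taken from this file): `async`
/// blocks and the bodies of `async` closures lowered in this file, as well as `async fn`
/// bodies lowered during item lowering, all funnel through this helper:
/// ```rust
/// async fn add(a: u32, b: u32) -> u32 { a + b }            // `async fn` body
///
/// fn make_future() -> impl std::future::Future<Output = u32> {
///     async { add(1, 2).await }                            // `async` block
/// }
/// ```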
535 pub(super) fn make_async_expr(
536 &mut self,
537 capture_clause: CaptureBy,
538 closure_node_id: NodeId,
539 ret_ty: Option<AstP<Ty>>,
540 span: Span,
541 async_gen_kind: hir::AsyncGeneratorKind,
542 body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
543 ) -> hir::ExprKind<'hir> {
544 let output = match ret_ty {
545 Some(ty) => hir::FnRetTy::Return(
546 self.lower_ty(&ty, ImplTraitContext::Disallowed(ImplTraitPosition::AsyncBlock)),
547 ),
548 None => hir::FnRetTy::DefaultReturn(self.lower_span(span)),
549 };
550
551 // Resume argument type. We let the compiler infer this to simplify the lowering. It is
552 // fully constrained by `future::from_generator`.
553 let input_ty = hir::Ty {
554 hir_id: self.next_id(),
555 kind: hir::TyKind::Infer,
556 span: self.lower_span(span),
557 };
558
559 // The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
560 let decl = self.arena.alloc(hir::FnDecl {
561 inputs: arena_vec![self; input_ty],
562 output,
563 c_variadic: false,
564 implicit_self: hir::ImplicitSelfKind::None,
565 });
566
567 // Lower the argument pattern/ident. The ident is used again in the `.await` lowering.
568 let (pat, task_context_hid) = self.pat_ident_binding_mode(
569 span,
570 Ident::with_dummy_span(sym::_task_context),
571 hir::BindingAnnotation::Mutable,
572 );
573 let param = hir::Param {
574 hir_id: self.next_id(),
575 pat,
576 ty_span: self.lower_span(span),
577 span: self.lower_span(span),
578 };
579 let params = arena_vec![self; param];
580
581 let body_id = self.lower_body(move |this| {
582 this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind));
583
584 let old_ctx = this.task_context;
585 this.task_context = Some(task_context_hid);
586 let res = body(this);
587 this.task_context = old_ctx;
588 (params, res)
589 });
590
591 // `static |_task_context| -> <ret_ty> { body }`:
592 let generator_kind = hir::ExprKind::Closure(
593 capture_clause,
594 decl,
595 body_id,
596 self.lower_span(span),
597 Some(hir::Movability::Static),
598 );
599 let generator = hir::Expr {
600 hir_id: self.lower_node_id(closure_node_id),
601 kind: generator_kind,
602 span: self.lower_span(span),
603 };
604
605 // `future::from_generator`:
606 let unstable_span =
607 self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
608 let gen_future = self.expr_lang_item_path(
609 unstable_span,
610 hir::LangItem::FromGenerator,
611 ThinVec::new(),
612 None,
613 );
614
615 // `future::from_generator(generator)`:
616 hir::ExprKind::Call(self.arena.alloc(gen_future), arena_vec![self; generator])
617 }
618
619 /// Desugar `<expr>.await` into:
620 /// ```rust
621 /// match ::std::future::IntoFuture::into_future(<expr>) {
622 /// mut pinned => loop {
623 /// match unsafe { ::std::future::Future::poll(
624 /// <::std::pin::Pin>::new_unchecked(&mut pinned),
625 /// ::std::future::get_context(task_context),
626 /// ) } {
627 /// ::std::task::Poll::Ready(result) => break result,
628 /// ::std::task::Poll::Pending => {}
629 /// }
630 /// task_context = yield ();
631 /// }
632 /// }
633 /// ```
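///
/// For illustration (an assumed surface example, not taken from this file): because the
/// scrutinee is passed through `IntoFuture::into_future`, `.await` accepts anything that
/// converts into a future; for a value that already is a `Future`, the conversion is the
/// identity:
/// ```rust
/// async fn demo() -> i32 {
///     std::future::ready(40).await + 2
/// }
/// ```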
634 fn lower_expr_await(&mut self, dot_await_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
635 let full_span = expr.span.to(dot_await_span);
636 match self.generator_kind {
637 Some(hir::GeneratorKind::Async(_)) => {}
638 Some(hir::GeneratorKind::Gen) | None => {
639 let mut err = struct_span_err!(
640 self.sess,
641 dot_await_span,
642 E0728,
643 "`await` is only allowed inside `async` functions and blocks"
644 );
645 err.span_label(dot_await_span, "only allowed inside `async` functions and blocks");
646 if let Some(item_sp) = self.current_item {
647 err.span_label(item_sp, "this is not `async`");
648 }
649 err.emit();
650 }
651 }
652 let span = self.mark_span_with_reason(DesugaringKind::Await, dot_await_span, None);
653 let gen_future_span = self.mark_span_with_reason(
654 DesugaringKind::Await,
655 full_span,
656 self.allow_gen_future.clone(),
657 );
658 let expr = self.lower_expr_mut(expr);
659 let expr_hir_id = expr.hir_id;
660
661 let pinned_ident = Ident::with_dummy_span(sym::pinned);
662 let (pinned_pat, pinned_pat_hid) =
663 self.pat_ident_binding_mode(span, pinned_ident, hir::BindingAnnotation::Mutable);
664
665 let task_context_ident = Ident::with_dummy_span(sym::_task_context);
666
667 // unsafe {
668 // ::std::future::Future::poll(
669 // ::std::pin::Pin::new_unchecked(&mut pinned),
670 // ::std::future::get_context(task_context),
671 // )
672 // }
673 let poll_expr = {
674 let pinned = self.expr_ident(span, pinned_ident, pinned_pat_hid);
675 let ref_mut_pinned = self.expr_mut_addr_of(span, pinned);
676 let task_context = if let Some(task_context_hid) = self.task_context {
677 self.expr_ident_mut(span, task_context_ident, task_context_hid)
678 } else {
679 // Use of `await` outside of an async context; we cannot use `task_context` here.
680 self.expr_err(span)
681 };
682 let new_unchecked = self.expr_call_lang_item_fn_mut(
683 span,
684 hir::LangItem::PinNewUnchecked,
685 arena_vec![self; ref_mut_pinned],
686 Some(expr_hir_id),
687 );
688 let get_context = self.expr_call_lang_item_fn_mut(
689 gen_future_span,
690 hir::LangItem::GetContext,
691 arena_vec![self; task_context],
692 Some(expr_hir_id),
693 );
694 let call = self.expr_call_lang_item_fn(
695 span,
696 hir::LangItem::FuturePoll,
697 arena_vec![self; new_unchecked, get_context],
698 Some(expr_hir_id),
699 );
700 self.arena.alloc(self.expr_unsafe(call))
701 };
702
703 // `::std::task::Poll::Ready(result) => break result`
704 let loop_node_id = self.resolver.next_node_id();
705 let loop_hir_id = self.lower_node_id(loop_node_id);
706 let ready_arm = {
707 let x_ident = Ident::with_dummy_span(sym::result);
708 let (x_pat, x_pat_hid) = self.pat_ident(gen_future_span, x_ident);
709 let x_expr = self.expr_ident(gen_future_span, x_ident, x_pat_hid);
710 let ready_field = self.single_pat_field(gen_future_span, x_pat);
711 let ready_pat = self.pat_lang_item_variant(
712 span,
713 hir::LangItem::PollReady,
714 ready_field,
715 Some(expr_hir_id),
716 );
717 let break_x = self.with_loop_scope(loop_node_id, move |this| {
718 let expr_break =
719 hir::ExprKind::Break(this.lower_loop_destination(None), Some(x_expr));
720 this.arena.alloc(this.expr(gen_future_span, expr_break, ThinVec::new()))
721 });
722 self.arm(ready_pat, break_x)
723 };
724
725 // `::std::task::Poll::Pending => {}`
726 let pending_arm = {
727 let pending_pat = self.pat_lang_item_variant(
728 span,
729 hir::LangItem::PollPending,
730 &[],
731 Some(expr_hir_id),
732 );
733 let empty_block = self.expr_block_empty(span);
734 self.arm(pending_pat, empty_block)
735 };
736
737 let inner_match_stmt = {
738 let match_expr = self.expr_match(
739 span,
740 poll_expr,
741 arena_vec![self; ready_arm, pending_arm],
742 hir::MatchSource::AwaitDesugar,
743 );
744 self.stmt_expr(span, match_expr)
745 };
746
747 // task_context = yield ();
748 let yield_stmt = {
749 let unit = self.expr_unit(span);
750 let yield_expr = self.expr(
751 span,
752 hir::ExprKind::Yield(unit, hir::YieldSource::Await { expr: Some(expr_hir_id) }),
753 ThinVec::new(),
754 );
755 let yield_expr = self.arena.alloc(yield_expr);
756
757 if let Some(task_context_hid) = self.task_context {
758 let lhs = self.expr_ident(span, task_context_ident, task_context_hid);
759 let assign = self.expr(
760 span,
761 hir::ExprKind::Assign(lhs, yield_expr, self.lower_span(span)),
762 AttrVec::new(),
763 );
764 self.stmt_expr(span, assign)
765 } else {
766 // Use of `await` outside of an async context. Return `yield_expr` so that we can
767 // proceed with type checking.
768 self.stmt(span, hir::StmtKind::Semi(yield_expr))
769 }
770 };
771
772 let loop_block = self.block_all(span, arena_vec![self; inner_match_stmt, yield_stmt], None);
773
774 // loop { .. }
775 let loop_expr = self.arena.alloc(hir::Expr {
776 hir_id: loop_hir_id,
777 kind: hir::ExprKind::Loop(
778 loop_block,
779 None,
780 hir::LoopSource::Loop,
781 self.lower_span(span),
782 ),
783 span: self.lower_span(span),
784 });
785
786 // mut pinned => loop { ... }
787 let pinned_arm = self.arm(pinned_pat, loop_expr);
788
789 // `match ::std::future::IntoFuture::into_future(<expr>) { ... }`
790 let into_future_span = self.mark_span_with_reason(
791 DesugaringKind::Await,
792 dot_await_span,
793 self.allow_into_future.clone(),
794 );
795 let into_future_expr = self.expr_call_lang_item_fn(
796 into_future_span,
797 hir::LangItem::IntoFutureIntoFuture,
798 arena_vec![self; expr],
799 Some(expr_hir_id),
800 );
801
802 // match <into_future_expr> {
803 // mut pinned => loop { .. }
804 // }
805 hir::ExprKind::Match(
806 into_future_expr,
807 arena_vec![self; pinned_arm],
808 hir::MatchSource::AwaitDesugar,
809 )
810 }
811
812 fn lower_expr_closure(
813 &mut self,
814 capture_clause: CaptureBy,
815 movability: Movability,
816 decl: &FnDecl,
817 body: &Expr,
818 fn_decl_span: Span,
819 ) -> hir::ExprKind<'hir> {
820 let (body_id, generator_option) = self.with_new_scopes(move |this| {
821 let prev = this.current_item;
822 this.current_item = Some(fn_decl_span);
823 let mut generator_kind = None;
824 let body_id = this.lower_fn_body(decl, |this| {
825 let e = this.lower_expr_mut(body);
826 generator_kind = this.generator_kind;
827 e
828 });
829 let generator_option =
830 this.generator_movability_for_fn(&decl, fn_decl_span, generator_kind, movability);
831 this.current_item = prev;
832 (body_id, generator_option)
833 });
834
835 // Lower outside new scope to preserve `is_in_loop_condition`.
836 let fn_decl = self.lower_fn_decl(decl, None, FnDeclKind::Closure, None);
837
838 hir::ExprKind::Closure(
839 capture_clause,
840 fn_decl,
841 body_id,
842 self.lower_span(fn_decl_span),
843 generator_option,
844 )
845 }
846
847 fn generator_movability_for_fn(
848 &mut self,
849 decl: &FnDecl,
850 fn_decl_span: Span,
851 generator_kind: Option<hir::GeneratorKind>,
852 movability: Movability,
853 ) -> Option<hir::Movability> {
854 match generator_kind {
855 Some(hir::GeneratorKind::Gen) => {
856 if decl.inputs.len() > 1 {
857 struct_span_err!(
858 self.sess,
859 fn_decl_span,
860 E0628,
861 "too many parameters for a generator (expected 0 or 1 parameters)"
862 )
863 .emit();
864 }
865 Some(movability)
866 }
867 Some(hir::GeneratorKind::Async(_)) => {
868 panic!("non-`async` closure body turned `async` during lowering");
869 }
870 None => {
871 if movability == Movability::Static {
872 struct_span_err!(self.sess, fn_decl_span, E0697, "closures cannot be static")
873 .emit();
874 }
875 None
876 }
877 }
878 }
879
880 fn lower_expr_async_closure(
881 &mut self,
882 capture_clause: CaptureBy,
883 closure_id: NodeId,
884 decl: &FnDecl,
885 body: &Expr,
886 fn_decl_span: Span,
887 ) -> hir::ExprKind<'hir> {
888 let outer_decl =
889 FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };
890
891 let body_id = self.with_new_scopes(|this| {
892 // FIXME(cramertj): allow `async` non-`move` closures with arguments.
893 if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
894 struct_span_err!(
895 this.sess,
896 fn_decl_span,
897 E0708,
898 "`async` non-`move` closures with parameters are not currently supported",
899 )
900 .help(
901 "consider using `let` statements to manually capture \
902 variables by reference before entering an `async move` closure",
903 )
904 .emit();
905 }
906
907 // Transform `async |x: u8| -> X { ... }` into
908 // `|x: u8| future_from_generator(|| -> X { ... })`.
909 let body_id = this.lower_fn_body(&outer_decl, |this| {
910 let async_ret_ty =
911 if let FnRetTy::Ty(ty) = &decl.output { Some(ty.clone()) } else { None };
912 let async_body = this.make_async_expr(
913 capture_clause,
914 closure_id,
915 async_ret_ty,
916 body.span,
917 hir::AsyncGeneratorKind::Closure,
918 |this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
919 );
920 this.expr(fn_decl_span, async_body, ThinVec::new())
921 });
922 body_id
923 });
924
925 // We need to lower the declaration outside the new scope, because we
926 // have to conserve the state of being inside a loop condition for the
927 // closure argument types.
928 let fn_decl = self.lower_fn_decl(&outer_decl, None, FnDeclKind::Closure, None);
929
930 hir::ExprKind::Closure(
931 capture_clause,
932 fn_decl,
933 body_id,
934 self.lower_span(fn_decl_span),
935 None,
936 )
937 }
938
939 /// Destructure the LHS of complex assignments.
940 /// For instance, lower `(a, b) = t` to `{ let (lhs1, lhs2) = t; a = lhs1; b = lhs2; }`.
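///
/// A runnable user-level sketch of the forms this handles (illustrative only):
/// ```rust
/// let (mut a, mut b) = (1, 2);
/// (b, a) = (a, b);                   // tuple LHS; the RHS is fully evaluated, then assigned
/// assert_eq!((a, b), (2, 1));
///
/// let (mut first, mut last) = (0, 0);
/// [first, .., last] = [10, 20, 30];  // slice LHS with a rest pattern `..`
/// assert_eq!((first, last), (10, 30));
/// ```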
941 fn lower_expr_assign(
942 &mut self,
943 lhs: &Expr,
944 rhs: &Expr,
945 eq_sign_span: Span,
946 whole_span: Span,
947 ) -> hir::ExprKind<'hir> {
948 // Return early in case of an ordinary assignment.
949 fn is_ordinary(lower_ctx: &mut LoweringContext<'_, '_>, lhs: &Expr) -> bool {
950 match &lhs.kind {
951 ExprKind::Array(..)
952 | ExprKind::Struct(..)
953 | ExprKind::Tup(..)
954 | ExprKind::Underscore => false,
955 // Check for tuple struct constructor.
956 ExprKind::Call(callee, ..) => lower_ctx.extract_tuple_struct_path(callee).is_none(),
957 ExprKind::Paren(e) => {
958 match e.kind {
959 // We special-case `(..)` for consistency with patterns.
960 ExprKind::Range(None, None, RangeLimits::HalfOpen) => false,
961 _ => is_ordinary(lower_ctx, e),
962 }
963 }
964 _ => true,
965 }
966 }
967 if is_ordinary(self, lhs) {
968 return hir::ExprKind::Assign(
969 self.lower_expr(lhs),
970 self.lower_expr(rhs),
971 self.lower_span(eq_sign_span),
972 );
973 }
974
975 let mut assignments = vec![];
976
977 // The LHS becomes a pattern: `(lhs1, lhs2)`.
978 let pat = self.destructure_assign(lhs, eq_sign_span, &mut assignments);
979 let rhs = self.lower_expr(rhs);
980
981 // Introduce a `let` for destructuring: `let (lhs1, lhs2) = t`.
982 let destructure_let = self.stmt_let_pat(
983 None,
984 whole_span,
985 Some(rhs),
986 pat,
987 hir::LocalSource::AssignDesugar(self.lower_span(eq_sign_span)),
988 );
989
990 // `a = lhs1; b = lhs2;`.
991 let stmts = self
992 .arena
993 .alloc_from_iter(std::iter::once(destructure_let).chain(assignments.into_iter()));
994
995 // Wrap everything in a block.
996 hir::ExprKind::Block(&self.block_all(whole_span, stmts, None), None)
997 }
998
999 /// If the given expression is a path to a tuple struct, returns that path.
1000 /// It is not a complete check, but just tries to reject most paths early
1001 /// if they are not tuple structs.
1002 /// Type checking will take care of the full validation later.
1003 fn extract_tuple_struct_path<'a>(
1004 &mut self,
1005 expr: &'a Expr,
1006 ) -> Option<(&'a Option<QSelf>, &'a Path)> {
1007 if let ExprKind::Path(qself, path) = &expr.kind {
1008 // Does the path resolve to something disallowed in a tuple struct/variant pattern?
1009 if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
1010 if partial_res.unresolved_segments() == 0
1011 && !partial_res.base_res().expected_in_tuple_struct_pat()
1012 {
1013 return None;
1014 }
1015 }
1016 return Some((qself, path));
1017 }
1018 None
1019 }
1020
1021 /// Convert the LHS of a destructuring assignment to a pattern.
1022 /// Each sub-assignment is recorded in `assignments`.
1023 fn destructure_assign(
1024 &mut self,
1025 lhs: &Expr,
1026 eq_sign_span: Span,
1027 assignments: &mut Vec<hir::Stmt<'hir>>,
1028 ) -> &'hir hir::Pat<'hir> {
1029 self.arena.alloc(self.destructure_assign_mut(lhs, eq_sign_span, assignments))
1030 }
1031
1032 fn destructure_assign_mut(
1033 &mut self,
1034 lhs: &Expr,
1035 eq_sign_span: Span,
1036 assignments: &mut Vec<hir::Stmt<'hir>>,
1037 ) -> hir::Pat<'hir> {
1038 match &lhs.kind {
1039 // Underscore pattern.
1040 ExprKind::Underscore => {
1041 return self.pat_without_dbm(lhs.span, hir::PatKind::Wild);
1042 }
1043 // Slice patterns.
1044 ExprKind::Array(elements) => {
1045 let (pats, rest) =
1046 self.destructure_sequence(elements, "slice", eq_sign_span, assignments);
1047 let slice_pat = if let Some((i, span)) = rest {
1048 let (before, after) = pats.split_at(i);
1049 hir::PatKind::Slice(
1050 before,
1051 Some(self.arena.alloc(self.pat_without_dbm(span, hir::PatKind::Wild))),
1052 after,
1053 )
1054 } else {
1055 hir::PatKind::Slice(pats, None, &[])
1056 };
1057 return self.pat_without_dbm(lhs.span, slice_pat);
1058 }
1059 // Tuple structs.
1060 ExprKind::Call(callee, args) => {
1061 if let Some((qself, path)) = self.extract_tuple_struct_path(callee) {
1062 let (pats, rest) = self.destructure_sequence(
1063 args,
1064 "tuple struct or variant",
1065 eq_sign_span,
1066 assignments,
1067 );
1068 let qpath = self.lower_qpath(
1069 callee.id,
1070 qself,
1071 path,
1072 ParamMode::Optional,
1073 ImplTraitContext::Disallowed(ImplTraitPosition::Path),
1074 );
1075 // Destructure like a tuple struct.
1076 let tuple_struct_pat =
1077 hir::PatKind::TupleStruct(qpath, pats, rest.map(|r| r.0));
1078 return self.pat_without_dbm(lhs.span, tuple_struct_pat);
1079 }
1080 }
1081 // Structs.
1082 ExprKind::Struct(se) => {
1083 let field_pats = self.arena.alloc_from_iter(se.fields.iter().map(|f| {
1084 let pat = self.destructure_assign(&f.expr, eq_sign_span, assignments);
1085 hir::PatField {
1086 hir_id: self.next_id(),
1087 ident: self.lower_ident(f.ident),
1088 pat,
1089 is_shorthand: f.is_shorthand,
1090 span: self.lower_span(f.span),
1091 }
1092 }));
1093 let qpath = self.lower_qpath(
1094 lhs.id,
1095 &se.qself,
1096 &se.path,
1097 ParamMode::Optional,
1098 ImplTraitContext::Disallowed(ImplTraitPosition::Path),
1099 );
1100 let fields_omitted = match &se.rest {
1101 StructRest::Base(e) => {
1102 self.sess
1103 .struct_span_err(
1104 e.span,
1105 "functional record updates are not allowed in destructuring \
1106 assignments",
1107 )
1108 .span_suggestion(
1109 e.span,
1110 "consider removing the trailing pattern",
1111 String::new(),
1112 rustc_errors::Applicability::MachineApplicable,
1113 )
1114 .emit();
1115 true
1116 }
1117 StructRest::Rest(_) => true,
1118 StructRest::None => false,
1119 };
1120 let struct_pat = hir::PatKind::Struct(qpath, field_pats, fields_omitted);
1121 return self.pat_without_dbm(lhs.span, struct_pat);
1122 }
1123 // Tuples.
1124 ExprKind::Tup(elements) => {
1125 let (pats, rest) =
1126 self.destructure_sequence(elements, "tuple", eq_sign_span, assignments);
1127 let tuple_pat = hir::PatKind::Tuple(pats, rest.map(|r| r.0));
1128 return self.pat_without_dbm(lhs.span, tuple_pat);
1129 }
1130 ExprKind::Paren(e) => {
1131 // We special-case `(..)` for consistency with patterns.
1132 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
1133 let tuple_pat = hir::PatKind::Tuple(&[], Some(0));
1134 return self.pat_without_dbm(lhs.span, tuple_pat);
1135 } else {
1136 return self.destructure_assign_mut(e, eq_sign_span, assignments);
1137 }
1138 }
1139 _ => {}
1140 }
1141 // Treat all other cases as a normal lvalue.
1142 let ident = Ident::new(sym::lhs, self.lower_span(lhs.span));
1143 let (pat, binding) = self.pat_ident_mut(lhs.span, ident);
1144 let ident = self.expr_ident(lhs.span, ident, binding);
1145 let assign =
1146 hir::ExprKind::Assign(self.lower_expr(lhs), ident, self.lower_span(eq_sign_span));
1147 let expr = self.expr(lhs.span, assign, ThinVec::new());
1148 assignments.push(self.stmt_expr(lhs.span, expr));
1149 pat
1150 }
1151
1152 /// Destructure a sequence of expressions occurring on the LHS of an assignment.
1153 /// Such a sequence occurs in a tuple (struct)/slice.
1154 /// Return a sequence of corresponding patterns, and the index and the span of `..` if it
1155 /// exists.
1156 /// Each sub-assignment is recorded in `assignments`.
1157 fn destructure_sequence(
1158 &mut self,
1159 elements: &[AstP<Expr>],
1160 ctx: &str,
1161 eq_sign_span: Span,
1162 assignments: &mut Vec<hir::Stmt<'hir>>,
1163 ) -> (&'hir [hir::Pat<'hir>], Option<(usize, Span)>) {
1164 let mut rest = None;
1165 let elements =
1166 self.arena.alloc_from_iter(elements.iter().enumerate().filter_map(|(i, e)| {
1167 // Check for `..` pattern.
1168 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
1169 if let Some((_, prev_span)) = rest {
1170 self.ban_extra_rest_pat(e.span, prev_span, ctx);
1171 } else {
1172 rest = Some((i, e.span));
1173 }
1174 None
1175 } else {
1176 Some(self.destructure_assign_mut(e, eq_sign_span, assignments))
1177 }
1178 }));
1179 (elements, rest)
1180 }
1181
1182 /// Desugar `<start>..=<end>` into `std::ops::RangeInclusive::new(<start>, <end>)`.
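///
/// A runnable equivalence sketch (illustrative only): `RangeInclusive` keeps private state,
/// so a constructor call is emitted instead of a struct literal like the other range forms.
/// ```rust
/// use std::ops::RangeInclusive;
/// assert_eq!(1..=5, RangeInclusive::new(1, 5));
/// ```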
1183 fn lower_expr_range_closed(&mut self, span: Span, e1: &Expr, e2: &Expr) -> hir::ExprKind<'hir> {
1184 let e1 = self.lower_expr_mut(e1);
1185 let e2 = self.lower_expr_mut(e2);
1186 let fn_path =
1187 hir::QPath::LangItem(hir::LangItem::RangeInclusiveNew, self.lower_span(span), None);
1188 let fn_expr =
1189 self.arena.alloc(self.expr(span, hir::ExprKind::Path(fn_path), ThinVec::new()));
1190 hir::ExprKind::Call(fn_expr, arena_vec![self; e1, e2])
1191 }
1192
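/// Lower the remaining range forms to struct literals of the corresponding `std::ops` range
/// types, resolved through lang items. A runnable sketch of the mapping (illustrative only):
/// ```rust
/// use std::ops::{Range, RangeFrom, RangeTo, RangeToInclusive};
/// assert_eq!(2..7, Range { start: 2, end: 7 });
/// assert_eq!(2.., RangeFrom { start: 2 });
/// assert_eq!(..7, RangeTo { end: 7 });
/// assert_eq!(..=7, RangeToInclusive { end: 7 });
/// // `..` (`RangeFull`) has no fields, and `a..=b` is handled by
/// // `lower_expr_range_closed` above.
/// ```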
1193 fn lower_expr_range(
1194 &mut self,
1195 span: Span,
1196 e1: Option<&Expr>,
1197 e2: Option<&Expr>,
1198 lims: RangeLimits,
1199 ) -> hir::ExprKind<'hir> {
1200 use rustc_ast::RangeLimits::*;
1201
1202 let lang_item = match (e1, e2, lims) {
1203 (None, None, HalfOpen) => hir::LangItem::RangeFull,
1204 (Some(..), None, HalfOpen) => hir::LangItem::RangeFrom,
1205 (None, Some(..), HalfOpen) => hir::LangItem::RangeTo,
1206 (Some(..), Some(..), HalfOpen) => hir::LangItem::Range,
1207 (None, Some(..), Closed) => hir::LangItem::RangeToInclusive,
1208 (Some(..), Some(..), Closed) => unreachable!(),
1209 (_, None, Closed) => self.diagnostic().span_fatal(span, "inclusive range with no end"),
1210 };
1211
1212 let fields = self.arena.alloc_from_iter(
1213 e1.iter().map(|e| (sym::start, e)).chain(e2.iter().map(|e| (sym::end, e))).map(
1214 |(s, e)| {
1215 let expr = self.lower_expr(&e);
1216 let ident = Ident::new(s, self.lower_span(e.span));
1217 self.expr_field(ident, expr, e.span)
1218 },
1219 ),
1220 );
1221
1222 hir::ExprKind::Struct(
1223 self.arena.alloc(hir::QPath::LangItem(lang_item, self.lower_span(span), None)),
1224 fields,
1225 None,
1226 )
1227 }
1228
1229 fn lower_label(&self, opt_label: Option<Label>) -> Option<Label> {
1230 let label = opt_label?;
1231 Some(Label { ident: self.lower_ident(label.ident) })
1232 }
1233
1234 fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>) -> hir::Destination {
1235 let target_id = match destination {
1236 Some((id, _)) => {
1237 if let Some(loop_id) = self.resolver.get_label_res(id) {
1238 Ok(self.lower_node_id(loop_id))
1239 } else {
1240 Err(hir::LoopIdError::UnresolvedLabel)
1241 }
1242 }
1243 None => self
1244 .loop_scope
1245 .map(|id| Ok(self.lower_node_id(id)))
1246 .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)),
1247 };
1248 let label = self.lower_label(destination.map(|(_, label)| label));
1249 hir::Destination { label, target_id }
1250 }
1251
1252 fn lower_jump_destination(&mut self, id: NodeId, opt_label: Option<Label>) -> hir::Destination {
1253 if self.is_in_loop_condition && opt_label.is_none() {
1254 hir::Destination {
1255 label: None,
1256 target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition),
1257 }
1258 } else {
1259 self.lower_loop_destination(opt_label.map(|label| (id, label)))
1260 }
1261 }
1262
1263 fn with_catch_scope<T>(&mut self, catch_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1264 let old_scope = self.catch_scope.replace(catch_id);
1265 let result = f(self);
1266 self.catch_scope = old_scope;
1267 result
1268 }
1269
1270 fn with_loop_scope<T>(&mut self, loop_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1271 // We're no longer in the base loop's condition; we're in another loop.
1272 let was_in_loop_condition = self.is_in_loop_condition;
1273 self.is_in_loop_condition = false;
1274
1275 let old_scope = self.loop_scope.replace(loop_id);
1276 let result = f(self);
1277 self.loop_scope = old_scope;
1278
1279 self.is_in_loop_condition = was_in_loop_condition;
1280
1281 result
1282 }
1283
1284 fn with_loop_condition_scope<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
1285 let was_in_loop_condition = self.is_in_loop_condition;
1286 self.is_in_loop_condition = true;
1287
1288 let result = f(self);
1289
1290 self.is_in_loop_condition = was_in_loop_condition;
1291
1292 result
1293 }
1294
1295 fn lower_expr_field(&mut self, f: &ExprField) -> hir::ExprField<'hir> {
1296 hir::ExprField {
1297 hir_id: self.next_id(),
1298 ident: self.lower_ident(f.ident),
1299 expr: self.lower_expr(&f.expr),
1300 span: self.lower_span(f.span),
1301 is_shorthand: f.is_shorthand,
1302 }
1303 }
1304
1305 fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
1306 match self.generator_kind {
1307 Some(hir::GeneratorKind::Gen) => {}
1308 Some(hir::GeneratorKind::Async(_)) => {
1309 struct_span_err!(
1310 self.sess,
1311 span,
1312 E0727,
1313 "`async` generators are not yet supported"
1314 )
1315 .emit();
1316 }
1317 None => self.generator_kind = Some(hir::GeneratorKind::Gen),
1318 }
1319
1320 let expr =
1321 opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span));
1322
1323 hir::ExprKind::Yield(expr, hir::YieldSource::Yield)
1324 }
1325
1326 /// Desugar `ExprForLoop` from: `[opt_ident]: for <pat> in <head> <body>` into:
1327 /// ```rust
1328 /// {
1329 /// let result = match IntoIterator::into_iter(<head>) {
1330 /// mut iter => {
1331 /// [opt_ident]: loop {
1332 /// match Iterator::next(&mut iter) {
1333 /// None => break,
1334 /// Some(<pat>) => <body>,
1335 /// };
1336 /// }
1337 /// }
1338 /// };
1339 /// result
1340 /// }
1341 /// ```
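///
/// A runnable sketch of the same equivalence at the user level (illustrative only; the real
/// lowering uses lang items and the `DropTemps` wrapper noted at the end of the body below):
/// ```rust
/// let v = vec![1, 2, 3];
/// let mut sum = 0;
/// for x in &v {
///     sum += x;
/// }
///
/// // Hand-written form of the desugaring:
/// let mut sum2 = 0;
/// match IntoIterator::into_iter(&v) {
///     mut iter => loop {
///         match Iterator::next(&mut iter) {
///             None => break,
///             Some(x) => sum2 += x,
///         }
///     },
/// }
/// assert_eq!(sum, sum2);
/// ```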
1342 fn lower_expr_for(
1343 &mut self,
1344 e: &Expr,
1345 pat: &Pat,
1346 head: &Expr,
1347 body: &Block,
1348 opt_label: Option<Label>,
1349 ) -> hir::Expr<'hir> {
1350 let head = self.lower_expr_mut(head);
1351 let pat = self.lower_pat(pat);
1352 let for_span =
1353 self.mark_span_with_reason(DesugaringKind::ForLoop, self.lower_span(e.span), None);
1354 let head_span = self.mark_span_with_reason(DesugaringKind::ForLoop, head.span, None);
1355 let pat_span = self.mark_span_with_reason(DesugaringKind::ForLoop, pat.span, None);
1356
1357 // `None => break`
1358 let none_arm = {
1359 let break_expr =
1360 self.with_loop_scope(e.id, |this| this.expr_break_alloc(for_span, ThinVec::new()));
1361 let pat = self.pat_none(for_span);
1362 self.arm(pat, break_expr)
1363 };
1364
1365 // Some(<pat>) => <body>,
1366 let some_arm = {
1367 let some_pat = self.pat_some(pat_span, pat);
1368 let body_block = self.with_loop_scope(e.id, |this| this.lower_block(body, false));
1369 let body_expr = self.arena.alloc(self.expr_block(body_block, ThinVec::new()));
1370 self.arm(some_pat, body_expr)
1371 };
1372
1373 // `mut iter`
1374 let iter = Ident::with_dummy_span(sym::iter);
1375 let (iter_pat, iter_pat_nid) =
1376 self.pat_ident_binding_mode(head_span, iter, hir::BindingAnnotation::Mutable);
1377
1378 // `match Iterator::next(&mut iter) { ... }`
1379 let match_expr = {
1380 let iter = self.expr_ident(head_span, iter, iter_pat_nid);
1381 let ref_mut_iter = self.expr_mut_addr_of(head_span, iter);
1382 let next_expr = self.expr_call_lang_item_fn(
1383 head_span,
1384 hir::LangItem::IteratorNext,
1385 arena_vec![self; ref_mut_iter],
1386 None,
1387 );
1388 let arms = arena_vec![self; none_arm, some_arm];
1389
1390 self.expr_match(head_span, next_expr, arms, hir::MatchSource::ForLoopDesugar)
1391 };
1392 let match_stmt = self.stmt_expr(for_span, match_expr);
1393
1394 let loop_block = self.block_all(for_span, arena_vec![self; match_stmt], None);
1395
1396 // `[opt_ident]: loop { ... }`
1397 let kind = hir::ExprKind::Loop(
1398 loop_block,
1399 self.lower_label(opt_label),
1400 hir::LoopSource::ForLoop,
1401 self.lower_span(for_span.with_hi(head.span.hi())),
1402 );
1403 let loop_expr =
1404 self.arena.alloc(hir::Expr { hir_id: self.lower_node_id(e.id), kind, span: for_span });
1405
1406 // `mut iter => { ... }`
1407 let iter_arm = self.arm(iter_pat, loop_expr);
1408
1409 // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
1410 let into_iter_expr = {
1411 self.expr_call_lang_item_fn(
1412 head_span,
1413 hir::LangItem::IntoIterIntoIter,
1414 arena_vec![self; head],
1415 None,
1416 )
1417 };
1418
1419 let match_expr = self.arena.alloc(self.expr_match(
1420 for_span,
1421 into_iter_expr,
1422 arena_vec![self; iter_arm],
1423 hir::MatchSource::ForLoopDesugar,
1424 ));
1425
1426 let attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
1427
1428 // This is effectively `{ let _result = ...; _result }`.
1429 // The construct was introduced in #21984 and is necessary to make sure that
1430 // temporaries in the `head` expression are dropped and do not leak to the
1431 // surrounding scope of the `match` since the `match` is not a terminating scope.
1432 //
1433 // Also, add the attributes to the outer returned expr node.
1434 self.expr_drop_temps_mut(for_span, match_expr, attrs.into())
1435 }
1436
1437 /// Desugar `ExprKind::Try` from: `<expr>?` into:
1438 /// ```rust
1439 /// match Try::branch(<expr>) {
1440 /// ControlFlow::Continue(val) => #[allow(unreachable_code)] val,
1441 /// ControlFlow::Break(residual) =>
1442 /// #[allow(unreachable_code)]
1443 /// // If there is an enclosing `try {...}`:
1444 /// break 'catch_target Try::from_residual(residual),
1445 /// // Otherwise:
1446 /// return Try::from_residual(residual),
1447 /// }
1448 /// ```
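///
/// A runnable surface-level approximation (illustrative only; `parse_and_double` is a made-up
/// example, and the real lowering uses the unstable `Try`/`ControlFlow` machinery shown above):
/// ```rust
/// fn parse_and_double(s: &str) -> Result<i32, std::num::ParseIntError> {
///     // `s.parse::<i32>()?` behaves roughly like:
///     // match s.parse::<i32>() {
///     //     Ok(val) => val,
///     //     Err(residual) => return Err(From::from(residual)),
///     // }
///     let n = s.parse::<i32>()?;
///     Ok(n * 2)
/// }
/// assert_eq!(parse_and_double("21"), Ok(42));
/// assert!(parse_and_double("x").is_err());
/// ```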
1449 fn lower_expr_try(&mut self, span: Span, sub_expr: &Expr) -> hir::ExprKind<'hir> {
1450 let unstable_span = self.mark_span_with_reason(
1451 DesugaringKind::QuestionMark,
1452 span,
1453 self.allow_try_trait.clone(),
1454 );
1455 let try_span = self.sess.source_map().end_point(span);
1456 let try_span = self.mark_span_with_reason(
1457 DesugaringKind::QuestionMark,
1458 try_span,
1459 self.allow_try_trait.clone(),
1460 );
1461
1462 // `Try::branch(<expr>)`
1463 let scrutinee = {
1464 // expand <expr>
1465 let sub_expr = self.lower_expr_mut(sub_expr);
1466
1467 self.expr_call_lang_item_fn(
1468 unstable_span,
1469 hir::LangItem::TryTraitBranch,
1470 arena_vec![self; sub_expr],
1471 None,
1472 )
1473 };
1474
1475 // `#[allow(unreachable_code)]`
1476 let attr = {
1477 // `allow(unreachable_code)`
1478 let allow = {
1479 let allow_ident = Ident::new(sym::allow, self.lower_span(span));
1480 let uc_ident = Ident::new(sym::unreachable_code, self.lower_span(span));
1481 let uc_nested = attr::mk_nested_word_item(uc_ident);
1482 attr::mk_list_item(allow_ident, vec![uc_nested])
1483 };
1484 attr::mk_attr_outer(allow)
1485 };
1486 let attrs = vec![attr];
1487
1488 // `ControlFlow::Continue(val) => #[allow(unreachable_code)] val,`
1489 let continue_arm = {
1490 let val_ident = Ident::with_dummy_span(sym::val);
1491 let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
1492 let val_expr = self.arena.alloc(self.expr_ident_with_attrs(
1493 span,
1494 val_ident,
1495 val_pat_nid,
1496 ThinVec::from(attrs.clone()),
1497 ));
1498 let continue_pat = self.pat_cf_continue(unstable_span, val_pat);
1499 self.arm(continue_pat, val_expr)
1500 };
1501
1502 // `ControlFlow::Break(residual) =>
1503 // #[allow(unreachable_code)]
1504 // return Try::from_residual(residual),`
1505 let break_arm = {
1506 let residual_ident = Ident::with_dummy_span(sym::residual);
1507 let (residual_local, residual_local_nid) = self.pat_ident(try_span, residual_ident);
1508 let residual_expr = self.expr_ident_mut(try_span, residual_ident, residual_local_nid);
1509 let from_residual_expr = self.wrap_in_try_constructor(
1510 hir::LangItem::TryTraitFromResidual,
1511 try_span,
1512 self.arena.alloc(residual_expr),
1513 unstable_span,
1514 );
1515 let thin_attrs = ThinVec::from(attrs);
1516 let ret_expr = if let Some(catch_node) = self.catch_scope {
1517 let target_id = Ok(self.lower_node_id(catch_node));
1518 self.arena.alloc(self.expr(
1519 try_span,
1520 hir::ExprKind::Break(
1521 hir::Destination { label: None, target_id },
1522 Some(from_residual_expr),
1523 ),
1524 thin_attrs,
1525 ))
1526 } else {
1527 self.arena.alloc(self.expr(
1528 try_span,
1529 hir::ExprKind::Ret(Some(from_residual_expr)),
1530 thin_attrs,
1531 ))
1532 };
1533
1534 let break_pat = self.pat_cf_break(try_span, residual_local);
1535 self.arm(break_pat, ret_expr)
1536 };
1537
1538 hir::ExprKind::Match(
1539 scrutinee,
1540 arena_vec![self; break_arm, continue_arm],
1541 hir::MatchSource::TryDesugar,
1542 )
1543 }
1544
1545 // =========================================================================
1546 // Helper methods for building HIR.
1547 // =========================================================================
1548
1549 /// Wrap the given `expr` in a terminating scope using `hir::ExprKind::DropTemps`.
1550 ///
1551 /// In terms of drop order, it has the same effect as wrapping `expr` in
1552 /// `{ let _t = $expr; _t }` but should provide better compile-time performance.
1553 ///
1554 /// The drop order can be important in e.g. `if expr { .. }`.
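///
/// A runnable sketch of why this matters (illustrative only; the `Guard` type is a made-up
/// example): temporaries created while evaluating an `if` condition are dropped before the
/// chosen branch runs, exactly as if the condition had been bound to `_t` in its own block:
/// ```rust
/// use std::cell::RefCell;
///
/// struct Guard<'a>(&'a RefCell<Vec<&'static str>>);
/// impl Drop for Guard<'_> {
///     fn drop(&mut self) {
///         self.0.borrow_mut().push("condition temporary dropped");
///     }
/// }
///
/// let log = RefCell::new(Vec::new());
/// if Guard(&log).0.borrow().is_empty() {
///     // The `Guard` and `Ref` temporaries are already gone here, so this
///     // `borrow_mut` does not conflict with the condition's borrow.
///     log.borrow_mut().push("then branch");
/// }
/// assert_eq!(*log.borrow(), ["condition temporary dropped", "then branch"]);
/// ```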
1555 pub(super) fn expr_drop_temps(
1556 &mut self,
1557 span: Span,
1558 expr: &'hir hir::Expr<'hir>,
1559 attrs: AttrVec,
1560 ) -> &'hir hir::Expr<'hir> {
1561 self.arena.alloc(self.expr_drop_temps_mut(span, expr, attrs))
1562 }
1563
1564 pub(super) fn expr_drop_temps_mut(
1565 &mut self,
1566 span: Span,
1567 expr: &'hir hir::Expr<'hir>,
1568 attrs: AttrVec,
1569 ) -> hir::Expr<'hir> {
1570 self.expr(span, hir::ExprKind::DropTemps(expr), attrs)
1571 }
1572
1573 fn expr_match(
1574 &mut self,
1575 span: Span,
1576 arg: &'hir hir::Expr<'hir>,
1577 arms: &'hir [hir::Arm<'hir>],
1578 source: hir::MatchSource,
1579 ) -> hir::Expr<'hir> {
1580 self.expr(span, hir::ExprKind::Match(arg, arms, source), ThinVec::new())
1581 }
1582
1583 fn expr_break(&mut self, span: Span, attrs: AttrVec) -> hir::Expr<'hir> {
1584 let expr_break = hir::ExprKind::Break(self.lower_loop_destination(None), None);
1585 self.expr(span, expr_break, attrs)
1586 }
1587
1588 fn expr_break_alloc(&mut self, span: Span, attrs: AttrVec) -> &'hir hir::Expr<'hir> {
1589 let expr_break = self.expr_break(span, attrs);
1590 self.arena.alloc(expr_break)
1591 }
1592
1593 fn expr_mut_addr_of(&mut self, span: Span, e: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1594 self.expr(
1595 span,
1596 hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e),
1597 ThinVec::new(),
1598 )
1599 }
1600
1601 fn expr_unit(&mut self, sp: Span) -> &'hir hir::Expr<'hir> {
1602 self.arena.alloc(self.expr(sp, hir::ExprKind::Tup(&[]), ThinVec::new()))
1603 }
1604
1605 fn expr_call_mut(
1606 &mut self,
1607 span: Span,
1608 e: &'hir hir::Expr<'hir>,
1609 args: &'hir [hir::Expr<'hir>],
1610 ) -> hir::Expr<'hir> {
1611 self.expr(span, hir::ExprKind::Call(e, args), ThinVec::new())
1612 }
1613
1614 fn expr_call(
1615 &mut self,
1616 span: Span,
1617 e: &'hir hir::Expr<'hir>,
1618 args: &'hir [hir::Expr<'hir>],
1619 ) -> &'hir hir::Expr<'hir> {
1620 self.arena.alloc(self.expr_call_mut(span, e, args))
1621 }
1622
1623 fn expr_call_lang_item_fn_mut(
1624 &mut self,
1625 span: Span,
1626 lang_item: hir::LangItem,
1627 args: &'hir [hir::Expr<'hir>],
1628 hir_id: Option<hir::HirId>,
1629 ) -> hir::Expr<'hir> {
1630 let path =
1631 self.arena.alloc(self.expr_lang_item_path(span, lang_item, ThinVec::new(), hir_id));
1632 self.expr_call_mut(span, path, args)
1633 }
1634
1635 fn expr_call_lang_item_fn(
1636 &mut self,
1637 span: Span,
1638 lang_item: hir::LangItem,
1639 args: &'hir [hir::Expr<'hir>],
1640 hir_id: Option<hir::HirId>,
1641 ) -> &'hir hir::Expr<'hir> {
1642 self.arena.alloc(self.expr_call_lang_item_fn_mut(span, lang_item, args, hir_id))
1643 }
1644
1645 fn expr_lang_item_path(
1646 &mut self,
1647 span: Span,
1648 lang_item: hir::LangItem,
1649 attrs: AttrVec,
1650 hir_id: Option<hir::HirId>,
1651 ) -> hir::Expr<'hir> {
1652 self.expr(
1653 span,
1654 hir::ExprKind::Path(hir::QPath::LangItem(lang_item, self.lower_span(span), hir_id)),
1655 attrs,
1656 )
1657 }
1658
1659 pub(super) fn expr_ident(
1660 &mut self,
1661 sp: Span,
1662 ident: Ident,
1663 binding: hir::HirId,
1664 ) -> &'hir hir::Expr<'hir> {
1665 self.arena.alloc(self.expr_ident_mut(sp, ident, binding))
1666 }
1667
1668 pub(super) fn expr_ident_mut(
1669 &mut self,
1670 sp: Span,
1671 ident: Ident,
1672 binding: hir::HirId,
1673 ) -> hir::Expr<'hir> {
1674 self.expr_ident_with_attrs(sp, ident, binding, ThinVec::new())
1675 }
1676
1677 fn expr_ident_with_attrs(
1678 &mut self,
1679 span: Span,
1680 ident: Ident,
1681 binding: hir::HirId,
1682 attrs: AttrVec,
1683 ) -> hir::Expr<'hir> {
1684 let expr_path = hir::ExprKind::Path(hir::QPath::Resolved(
1685 None,
1686 self.arena.alloc(hir::Path {
1687 span: self.lower_span(span),
1688 res: Res::Local(binding),
1689 segments: arena_vec![self; hir::PathSegment::from_ident(ident)],
1690 }),
1691 ));
1692
1693 self.expr(span, expr_path, attrs)
1694 }
1695
1696 fn expr_unsafe(&mut self, expr: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1697 let hir_id = self.next_id();
1698 let span = expr.span;
1699 self.expr(
1700 span,
1701 hir::ExprKind::Block(
1702 self.arena.alloc(hir::Block {
1703 stmts: &[],
1704 expr: Some(expr),
1705 hir_id,
1706 rules: hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated),
1707 span: self.lower_span(span),
1708 targeted_by_break: false,
1709 }),
1710 None,
1711 ),
1712 ThinVec::new(),
1713 )
1714 }
1715
1716 fn expr_block_empty(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
1717 let blk = self.block_all(span, &[], None);
1718 let expr = self.expr_block(blk, ThinVec::new());
1719 self.arena.alloc(expr)
1720 }
1721
1722 pub(super) fn expr_block(
1723 &mut self,
1724 b: &'hir hir::Block<'hir>,
1725 attrs: AttrVec,
1726 ) -> hir::Expr<'hir> {
1727 self.expr(b.span, hir::ExprKind::Block(b, None), attrs)
1728 }
1729
1730 pub(super) fn expr(
1731 &mut self,
1732 span: Span,
1733 kind: hir::ExprKind<'hir>,
1734 attrs: AttrVec,
1735 ) -> hir::Expr<'hir> {
1736 let hir_id = self.next_id();
1737 self.lower_attrs(hir_id, &attrs);
1738 hir::Expr { hir_id, kind, span: self.lower_span(span) }
1739 }
1740
1741 fn expr_field(
1742 &mut self,
1743 ident: Ident,
1744 expr: &'hir hir::Expr<'hir>,
1745 span: Span,
1746 ) -> hir::ExprField<'hir> {
1747 hir::ExprField {
1748 hir_id: self.next_id(),
1749 ident,
1750 span: self.lower_span(span),
1751 expr,
1752 is_shorthand: false,
1753 }
1754 }
1755
1756 fn arm(&mut self, pat: &'hir hir::Pat<'hir>, expr: &'hir hir::Expr<'hir>) -> hir::Arm<'hir> {
1757 hir::Arm {
1758 hir_id: self.next_id(),
1759 pat,
1760 guard: None,
1761 span: self.lower_span(expr.span),
1762 body: expr,
1763 }
1764 }
1765 }