]> git.proxmox.com Git - rustc.git/blob - compiler/rustc_ast_lowering/src/expr.rs
New upstream version 1.52.0~beta.3+dfsg1
[rustc.git] / compiler / rustc_ast_lowering / src / expr.rs
1 use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
2
3 use rustc_ast::attr;
4 use rustc_ast::ptr::P as AstP;
5 use rustc_ast::*;
6 use rustc_data_structures::fx::FxHashMap;
7 use rustc_data_structures::stack::ensure_sufficient_stack;
8 use rustc_data_structures::thin_vec::ThinVec;
9 use rustc_errors::struct_span_err;
10 use rustc_hir as hir;
11 use rustc_hir::def::Res;
12 use rustc_hir::definitions::DefPathData;
13 use rustc_session::parse::feature_err;
14 use rustc_span::hygiene::ExpnId;
15 use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
16 use rustc_span::symbol::{sym, Ident, Symbol};
17 use rustc_span::{hygiene::ForLoopLoc, DUMMY_SP};
18 use rustc_target::asm;
19 use std::collections::hash_map::Entry;
20 use std::fmt::Write;
21
22 impl<'hir> LoweringContext<'_, 'hir> {
23 fn lower_exprs(&mut self, exprs: &[AstP<Expr>]) -> &'hir [hir::Expr<'hir>] {
24 self.arena.alloc_from_iter(exprs.iter().map(|x| self.lower_expr_mut(x)))
25 }
26
27 pub(super) fn lower_expr(&mut self, e: &Expr) -> &'hir hir::Expr<'hir> {
28 self.arena.alloc(self.lower_expr_mut(e))
29 }
30
    /// Central entry point of expression lowering: translates one AST
    /// expression into its HIR counterpart. Most variants map one-to-one;
    /// control-flow sugar (`if let`, `while`, `for`, `.await`, `try`,
    /// parenthesized `let`) is desugared by the dedicated helpers below.
    ///
    /// Wrapped in `ensure_sufficient_stack` because expression trees can be
    /// arbitrarily deep and lowering recurses over them.
    pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
        ensure_sufficient_stack(|| {
            let kind = match e.kind {
                ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
                ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
                ExprKind::ConstBlock(ref anon_const) => {
                    let anon_const = self.lower_anon_const(anon_const);
                    hir::ExprKind::ConstBlock(anon_const)
                }
                ExprKind::Repeat(ref expr, ref count) => {
                    let expr = self.lower_expr(expr);
                    let count = self.lower_anon_const(count);
                    hir::ExprKind::Repeat(expr, count)
                }
                ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
                ExprKind::Call(ref f, ref args) => {
                    // Calls to functions with `#[rustc_legacy_const_generics]`
                    // get some of their value arguments rewritten into const
                    // generic arguments; see `lower_legacy_const_generics`.
                    if let Some(legacy_args) = self.resolver.legacy_const_generic_args(f) {
                        self.lower_legacy_const_generics((**f).clone(), args.clone(), &legacy_args)
                    } else {
                        let f = self.lower_expr(f);
                        hir::ExprKind::Call(f, self.lower_exprs(args))
                    }
                }
                ExprKind::MethodCall(ref seg, ref args, span) => {
                    let hir_seg = self.arena.alloc(self.lower_path_segment(
                        e.span,
                        seg,
                        ParamMode::Optional,
                        0,
                        ParenthesizedGenericArgs::Err,
                        ImplTraitContext::disallowed(),
                        None,
                    ));
                    let args = self.lower_exprs(args);
                    hir::ExprKind::MethodCall(hir_seg, seg.ident.span, args, span)
                }
                ExprKind::Binary(binop, ref lhs, ref rhs) => {
                    let binop = self.lower_binop(binop);
                    let lhs = self.lower_expr(lhs);
                    let rhs = self.lower_expr(rhs);
                    hir::ExprKind::Binary(binop, lhs, rhs)
                }
                ExprKind::Unary(op, ref ohs) => {
                    let op = self.lower_unop(op);
                    let ohs = self.lower_expr(ohs);
                    hir::ExprKind::Unary(op, ohs)
                }
                ExprKind::Lit(ref l) => hir::ExprKind::Lit(respan(l.span, l.kind.clone())),
                ExprKind::Cast(ref expr, ref ty) => {
                    let expr = self.lower_expr(expr);
                    let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
                    hir::ExprKind::Cast(expr, ty)
                }
                ExprKind::Type(ref expr, ref ty) => {
                    let expr = self.lower_expr(expr);
                    let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
                    hir::ExprKind::Type(expr, ty)
                }
                ExprKind::AddrOf(k, m, ref ohs) => {
                    let ohs = self.lower_expr(ohs);
                    hir::ExprKind::AddrOf(k, m, ohs)
                }
                // A bare `let` expression outside of an `if`/`while` condition
                // is an error; `lower_expr_let` reports it and recovers.
                ExprKind::Let(ref pat, ref scrutinee) => {
                    self.lower_expr_let(e.span, pat, scrutinee)
                }
                ExprKind::If(ref cond, ref then, ref else_opt) => match cond.kind {
                    ExprKind::Let(ref pat, ref scrutinee) => {
                        self.lower_expr_if_let(e.span, pat, scrutinee, then, else_opt.as_deref())
                    }
                    ExprKind::Paren(ref paren) => match paren.peel_parens().kind {
                        ExprKind::Let(ref pat, ref scrutinee) => {
                            // A user has written `if (let Some(x) = foo) {`, we want to avoid
                            // confusing them with mentions of nightly features.
                            // If this logic is changed, you will also likely need to touch
                            // `unused::UnusedParens::check_expr`.
                            self.if_let_expr_with_parens(cond, &paren.peel_parens());
                            self.lower_expr_if_let(
                                e.span,
                                pat,
                                scrutinee,
                                then,
                                else_opt.as_deref(),
                            )
                        }
                        _ => self.lower_expr_if(cond, then, else_opt.as_deref()),
                    },
                    _ => self.lower_expr_if(cond, then, else_opt.as_deref()),
                },
                // `while` and `loop` bodies are lowered inside a loop scope so
                // `break`/`continue` destinations resolve correctly.
                ExprKind::While(ref cond, ref body, opt_label) => self
                    .with_loop_scope(e.id, |this| {
                        this.lower_expr_while_in_loop_scope(e.span, cond, body, opt_label)
                    }),
                ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
                    hir::ExprKind::Loop(
                        this.lower_block(body, false),
                        opt_label,
                        hir::LoopSource::Loop,
                        DUMMY_SP,
                    )
                }),
                ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body),
                ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match(
                    self.lower_expr(expr),
                    self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
                    hir::MatchSource::Normal,
                ),
                ExprKind::Async(capture_clause, closure_node_id, ref block) => self
                    .make_async_expr(
                        capture_clause,
                        closure_node_id,
                        None,
                        block.span,
                        hir::AsyncGeneratorKind::Block,
                        |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
                    ),
                ExprKind::Await(ref expr) => self.lower_expr_await(e.span, expr),
                ExprKind::Closure(
                    capture_clause,
                    asyncness,
                    movability,
                    ref decl,
                    ref body,
                    fn_decl_span,
                ) => {
                    // `async` closures and plain closures lower differently:
                    // the async body is wrapped in a generator/future.
                    if let Async::Yes { closure_id, .. } = asyncness {
                        self.lower_expr_async_closure(
                            capture_clause,
                            closure_id,
                            decl,
                            body,
                            fn_decl_span,
                        )
                    } else {
                        self.lower_expr_closure(
                            capture_clause,
                            movability,
                            decl,
                            body,
                            fn_decl_span,
                        )
                    }
                }
                ExprKind::Block(ref blk, opt_label) => {
                    hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
                }
                ExprKind::Assign(ref el, ref er, span) => {
                    self.lower_expr_assign(el, er, span, e.span)
                }
                ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp(
                    self.lower_binop(op),
                    self.lower_expr(el),
                    self.lower_expr(er),
                ),
                ExprKind::Field(ref el, ident) => hir::ExprKind::Field(self.lower_expr(el), ident),
                ExprKind::Index(ref el, ref er) => {
                    hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
                }
                // `a..=b` desugars to `RangeInclusive::new(a, b)`; all other
                // range forms lower to range-struct literals.
                ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
                    self.lower_expr_range_closed(e.span, e1, e2)
                }
                ExprKind::Range(ref e1, ref e2, lims) => {
                    self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
                }
                ExprKind::Underscore => {
                    self.sess
                        .struct_span_err(
                            e.span,
                            "in expressions, `_` can only be used on the left-hand side of an assignment",
                        )
                        .span_label(e.span, "`_` not allowed here")
                        .emit();
                    hir::ExprKind::Err
                }
                ExprKind::Path(ref qself, ref path) => {
                    let qpath = self.lower_qpath(
                        e.id,
                        qself,
                        path,
                        ParamMode::Optional,
                        ImplTraitContext::disallowed(),
                    );
                    hir::ExprKind::Path(qpath)
                }
                ExprKind::Break(opt_label, ref opt_expr) => {
                    let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
                    hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr)
                }
                ExprKind::Continue(opt_label) => {
                    hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label))
                }
                ExprKind::Ret(ref e) => {
                    let e = e.as_ref().map(|x| self.lower_expr(x));
                    hir::ExprKind::Ret(e)
                }
                ExprKind::InlineAsm(ref asm) => self.lower_expr_asm(e.span, asm),
                ExprKind::LlvmInlineAsm(ref asm) => self.lower_expr_llvm_asm(asm),
                ExprKind::Struct(ref se) => {
                    let rest = match &se.rest {
                        StructRest::Base(e) => Some(self.lower_expr(e)),
                        // `S { .. }` without a base expression: report the
                        // error but recover with an error expression as base.
                        StructRest::Rest(sp) => {
                            self.sess
                                .struct_span_err(*sp, "base expression required after `..`")
                                .span_label(*sp, "add a base expression here")
                                .emit();
                            Some(&*self.arena.alloc(self.expr_err(*sp)))
                        }
                        StructRest::None => None,
                    };
                    hir::ExprKind::Struct(
                        self.arena.alloc(self.lower_qpath(
                            e.id,
                            &None,
                            &se.path,
                            ParamMode::Optional,
                            ImplTraitContext::disallowed(),
                        )),
                        self.arena
                            .alloc_from_iter(se.fields.iter().map(|x| self.lower_expr_field(x))),
                        rest,
                    )
                }
                ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
                ExprKind::Err => hir::ExprKind::Err,
                ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr),
                // Parens vanish in HIR: lower the inner expression and return
                // it directly (note the early `return` — the shared tail below
                // would otherwise allocate a fresh hir_id for the parens).
                ExprKind::Paren(ref ex) => {
                    let mut ex = self.lower_expr_mut(ex);
                    // Include parens in span, but only if it is a super-span.
                    if e.span.contains(ex.span) {
                        ex.span = e.span;
                    }
                    // Merge attributes into the inner expression.
                    if !e.attrs.is_empty() {
                        let old_attrs = self.attrs.get(&ex.hir_id).map(|la| *la).unwrap_or(&[]);
                        self.attrs.insert(
                            ex.hir_id,
                            &*self.arena.alloc_from_iter(
                                e.attrs
                                    .iter()
                                    .map(|a| self.lower_attr(a))
                                    .chain(old_attrs.iter().cloned()),
                            ),
                        );
                    }
                    return ex;
                }

                // Desugar `ExprForLoop`
                // from: `[opt_ident]: for <pat> in <head> <body>`
                ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
                    return self.lower_expr_for(e, pat, head, body, opt_label);
                }
                // Macros must have been expanded by the time lowering runs.
                ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
            };

            // Shared tail for every non-`return`ing arm: assign the HIR id,
            // attach lowered attributes, and build the HIR expression.
            let hir_id = self.lower_node_id(e.id);
            self.lower_attrs(hir_id, &e.attrs);
            hir::Expr { hir_id, kind, span: e.span }
        })
    }
290
291 fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
292 match u {
293 UnOp::Deref => hir::UnOp::Deref,
294 UnOp::Not => hir::UnOp::Not,
295 UnOp::Neg => hir::UnOp::Neg,
296 }
297 }
298
299 fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
300 Spanned {
301 node: match b.node {
302 BinOpKind::Add => hir::BinOpKind::Add,
303 BinOpKind::Sub => hir::BinOpKind::Sub,
304 BinOpKind::Mul => hir::BinOpKind::Mul,
305 BinOpKind::Div => hir::BinOpKind::Div,
306 BinOpKind::Rem => hir::BinOpKind::Rem,
307 BinOpKind::And => hir::BinOpKind::And,
308 BinOpKind::Or => hir::BinOpKind::Or,
309 BinOpKind::BitXor => hir::BinOpKind::BitXor,
310 BinOpKind::BitAnd => hir::BinOpKind::BitAnd,
311 BinOpKind::BitOr => hir::BinOpKind::BitOr,
312 BinOpKind::Shl => hir::BinOpKind::Shl,
313 BinOpKind::Shr => hir::BinOpKind::Shr,
314 BinOpKind::Eq => hir::BinOpKind::Eq,
315 BinOpKind::Lt => hir::BinOpKind::Lt,
316 BinOpKind::Le => hir::BinOpKind::Le,
317 BinOpKind::Ne => hir::BinOpKind::Ne,
318 BinOpKind::Ge => hir::BinOpKind::Ge,
319 BinOpKind::Gt => hir::BinOpKind::Gt,
320 },
321 span: b.span,
322 }
323 }
324
    /// Lowers a call to a function marked `#[rustc_legacy_const_generics]`:
    /// the value arguments at the positions listed in `legacy_args_idx` are
    /// moved out of the argument list and turned into anonymous-const generic
    /// arguments on the callee path, then the rewritten call is lowered as a
    /// normal `Call`.
    ///
    /// Takes `f` and `args` by value because the AST is rewritten in place
    /// before lowering.
    fn lower_legacy_const_generics(
        &mut self,
        mut f: Expr,
        args: Vec<AstP<Expr>>,
        legacy_args_idx: &[usize],
    ) -> hir::ExprKind<'hir> {
        // The callee must be a plain (non-qualified) path — the resolver only
        // reports legacy const-generic args for such callees.
        let path = match f.kind {
            ExprKind::Path(None, ref mut path) => path,
            _ => unreachable!(),
        };

        // Split the arguments into const generics and normal arguments
        let mut real_args = vec![];
        let mut generic_args = vec![];
        for (idx, arg) in args.into_iter().enumerate() {
            if legacy_args_idx.contains(&idx) {
                let parent_def_id = self.current_hir_id_owner.last().unwrap().0;
                let node_id = self.resolver.next_node_id();

                // Add a definition for the in-band const def.
                self.resolver.create_def(
                    parent_def_id,
                    node_id,
                    DefPathData::AnonConst,
                    ExpnId::root(),
                    arg.span,
                );

                let anon_const = AnonConst { id: node_id, value: arg };
                generic_args.push(AngleBracketedArg::Arg(GenericArg::Const(anon_const)));
            } else {
                real_args.push(arg);
            }
        }

        // Add generic args to the last element of the path.
        let last_segment = path.segments.last_mut().unwrap();
        assert!(last_segment.args.is_none());
        last_segment.args = Some(AstP(GenericArgs::AngleBracketed(AngleBracketedArgs {
            span: DUMMY_SP,
            args: generic_args,
        })));

        // Now lower everything as normal.
        let f = self.lower_expr(&f);
        hir::ExprKind::Call(f, self.lower_exprs(&real_args))
    }
372
373 fn if_let_expr_with_parens(&mut self, cond: &Expr, paren: &Expr) {
374 let start = cond.span.until(paren.span);
375 let end = paren.span.shrink_to_hi().until(cond.span.shrink_to_hi());
376 self.sess
377 .struct_span_err(
378 vec![start, end],
379 "invalid parentheses around `let` expression in `if let`",
380 )
381 .multipart_suggestion(
382 "`if let` needs to be written without parentheses",
383 vec![(start, String::new()), (end, String::new())],
384 rustc_errors::Applicability::MachineApplicable,
385 )
386 .emit();
387 // Ideally, we'd remove the feature gating of a `let` expression since we are already
388 // complaining about it here, but `feature_gate::check_crate` has already run by now:
389 // self.sess.parse_sess.gated_spans.ungate_last(sym::let_chains, paren.span);
390 }
391
    /// Emit an error and lower `ast::ExprKind::Let(pat, scrutinee)` into:
    /// ```rust
    /// match scrutinee { pats => true, _ => false }
    /// ```
    ///
    /// Only reached for a `let` expression in a position where it is not
    /// allowed (valid positions are handled directly by the `if`/`while`
    /// lowerings before recursing into the condition).
    fn lower_expr_let(&mut self, span: Span, pat: &Pat, scrutinee: &Expr) -> hir::ExprKind<'hir> {
        // If we got here, the `let` expression is not allowed.

        // On nightly, `let` expressions exist behind a feature gate, so point
        // at where they *are* supported; on stable, `let` can only be a
        // statement, so phrase the error accordingly.
        if self.sess.opts.unstable_features.is_nightly_build() {
            self.sess
                .struct_span_err(span, "`let` expressions are not supported here")
                .note(
                    "only supported directly without parentheses in conditions of `if`- and \
                     `while`-expressions, as well as in `let` chains within parentheses",
                )
                .emit();
        } else {
            self.sess
                .struct_span_err(span, "expected expression, found statement (`let`)")
                .note("variable declaration using `let` is a statement")
                .emit();
        }

        // For better recovery, we emit:
        // ```
        // match scrutinee { pat => true, _ => false }
        // ```
        // While this doesn't fully match the user's intent, it has key advantages:
        // 1. We can avoid using `abort_if_errors`.
        // 2. We can typeck both `pat` and `scrutinee`.
        // 3. `pat` is allowed to be refutable.
        // 4. The return type of the block is `bool` which seems like what the user wanted.
        let scrutinee = self.lower_expr(scrutinee);
        let then_arm = {
            let pat = self.lower_pat(pat);
            let expr = self.expr_bool(span, true);
            self.arm(pat, expr)
        };
        let else_arm = {
            let pat = self.pat_wild(span);
            let expr = self.expr_bool(span, false);
            self.arm(pat, expr)
        };
        hir::ExprKind::Match(
            scrutinee,
            arena_vec![self; then_arm, else_arm],
            hir::MatchSource::Normal,
        )
    }
440
441 fn lower_expr_if(
442 &mut self,
443 cond: &Expr,
444 then: &Block,
445 else_opt: Option<&Expr>,
446 ) -> hir::ExprKind<'hir> {
447 macro_rules! make_if {
448 ($opt:expr) => {{
449 let cond = self.lower_expr(cond);
450 let then_expr = self.lower_block_expr(then);
451 hir::ExprKind::If(cond, self.arena.alloc(then_expr), $opt)
452 }};
453 }
454 if let Some(rslt) = else_opt {
455 make_if!(Some(self.lower_expr(rslt)))
456 } else {
457 make_if!(None)
458 }
459 }
460
    /// Lowers `if let <pat> = <scrutinee> { <then> } [else <else_opt>]` into:
    ///
    /// ```text
    /// match <scrutinee> {
    ///     <pat> => <then>,
    ///     _ => <else_opt, or {} if absent>,
    /// }
    /// ```
    ///
    /// The else branch is lowered *before* the scrutinee/then pair; this
    /// ordering determines HIR id assignment and must not be changed lightly.
    fn lower_expr_if_let(
        &mut self,
        span: Span,
        pat: &Pat,
        scrutinee: &Expr,
        then: &Block,
        else_opt: Option<&Expr>,
    ) -> hir::ExprKind<'hir> {
        // FIXME(#53667): handle lowering of && and parens.

        // `_ => else_block` where `else_block` is `{}` if there's `None`:
        let else_pat = self.pat_wild(span);
        let (else_expr, contains_else_clause) = match else_opt {
            None => (self.expr_block_empty(span.shrink_to_hi()), false),
            Some(els) => (self.lower_expr(els), true),
        };
        let else_arm = self.arm(else_pat, else_expr);

        // Handle then + scrutinee:
        let scrutinee = self.lower_expr(scrutinee);
        let then_pat = self.lower_pat(pat);

        let then_expr = self.lower_block_expr(then);
        let then_arm = self.arm(then_pat, self.arena.alloc(then_expr));

        // Record whether an `else` was present so later diagnostics about the
        // desugaring can refer back to the user's source shape.
        let desugar = hir::MatchSource::IfLetDesugar { contains_else_clause };
        hir::ExprKind::Match(scrutinee, arena_vec![self; then_arm, else_arm], desugar)
    }
489
    /// Lowers `[opt_label]: while <cond> <body>` (including `while let`) into
    /// a `loop { match ... }` desugaring. Must be called inside a loop scope
    /// (see `with_loop_scope`) so that `break`/`continue` inside the body —
    /// and inside the condition — resolve correctly.
    fn lower_expr_while_in_loop_scope(
        &mut self,
        span: Span,
        cond: &Expr,
        body: &Block,
        opt_label: Option<Label>,
    ) -> hir::ExprKind<'hir> {
        // FIXME(#53667): handle lowering of && and parens.

        // Note that the block AND the condition are evaluated in the loop scope.
        // This is done to allow `break` from inside the condition of the loop.

        // `_ => break`:
        let else_arm = {
            let else_pat = self.pat_wild(span);
            let else_expr = self.expr_break(span, ThinVec::new());
            self.arm(else_pat, else_expr)
        };

        // Handle then + scrutinee:
        let (then_pat, scrutinee, desugar, source) = match cond.kind {
            ExprKind::Let(ref pat, ref scrutinee) => {
                // to:
                //
                //   [opt_ident]: loop {
                //     match <sub_expr> {
                //       <pat> => <body>,
                //       _ => break
                //     }
                //   }
                let scrutinee = self.with_loop_condition_scope(|t| t.lower_expr(scrutinee));
                let pat = self.lower_pat(pat);
                (pat, scrutinee, hir::MatchSource::WhileLetDesugar, hir::LoopSource::WhileLet)
            }
            _ => {
                // We desugar: `'label: while $cond $body` into:
                //
                // ```
                // 'label: loop {
                //     match drop-temps { $cond } {
                //         true => $body,
                //         _ => break,
                //     }
                // }
                // ```

                // Lower condition:
                let cond = self.with_loop_condition_scope(|this| this.lower_expr(cond));
                let span_block =
                    self.mark_span_with_reason(DesugaringKind::CondTemporary, cond.span, None);
                // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
                // to preserve drop semantics since `while cond { ... }` does not
                // let temporaries live outside of `cond`.
                let cond = self.expr_drop_temps(span_block, cond, ThinVec::new());
                // `true => <then>`:
                let pat = self.pat_bool(span, true);
                (pat, cond, hir::MatchSource::WhileDesugar, hir::LoopSource::While)
            }
        };
        let then_expr = self.lower_block_expr(body);
        let then_arm = self.arm(then_pat, self.arena.alloc(then_expr));

        // `match <scrutinee> { ... }`
        let match_expr =
            self.expr_match(span, scrutinee, arena_vec![self; then_arm, else_arm], desugar);

        // `[opt_ident]: loop { ... }`
        hir::ExprKind::Loop(
            self.block_expr(self.arena.alloc(match_expr)),
            opt_label,
            source,
            span.with_hi(cond.span.hi()),
        )
    }
564
    /// Desugar `try { <stmts>; <expr> }` into `{ <stmts>; ::std::ops::Try::from_ok(<expr>) }`,
    /// `try { <stmts>; }` into `{ <stmts>; ::std::ops::Try::from_ok(()) }`
    /// and save the block id to use it as a break target for desugaring of the `?` operator.
    fn lower_expr_try_block(&mut self, body: &Block) -> hir::ExprKind<'hir> {
        // `with_catch_scope` records `body.id` as the current `?` break target.
        self.with_catch_scope(body.id, |this| {
            let mut block = this.lower_block_noalloc(body, true);

            // Final expression of the block (if present) or `()` with span at the end of block
            let (try_span, tail_expr) = if let Some(expr) = block.expr.take() {
                (
                    this.mark_span_with_reason(
                        DesugaringKind::TryBlock,
                        expr.span,
                        this.allow_try_trait.clone(),
                    ),
                    expr,
                )
            } else {
                let try_span = this.mark_span_with_reason(
                    DesugaringKind::TryBlock,
                    this.sess.source_map().end_point(body.span),
                    this.allow_try_trait.clone(),
                );

                (try_span, this.expr_unit(try_span))
            };

            let ok_wrapped_span =
                this.mark_span_with_reason(DesugaringKind::TryBlock, tail_expr.span, None);

            // `::std::ops::Try::from_ok($tail_expr)`
            block.expr = Some(this.wrap_in_try_constructor(
                hir::LangItem::TryFromOk,
                try_span,
                tail_expr,
                ok_wrapped_span,
            ));

            hir::ExprKind::Block(this.arena.alloc(block), None)
        })
    }
606
607 fn wrap_in_try_constructor(
608 &mut self,
609 lang_item: hir::LangItem,
610 method_span: Span,
611 expr: &'hir hir::Expr<'hir>,
612 overall_span: Span,
613 ) -> &'hir hir::Expr<'hir> {
614 let constructor =
615 self.arena.alloc(self.expr_lang_item_path(method_span, lang_item, ThinVec::new()));
616 self.expr_call(overall_span, constructor, std::slice::from_ref(expr))
617 }
618
619 fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
620 let pat = self.lower_pat(&arm.pat);
621 let guard = arm.guard.as_ref().map(|cond| {
622 if let ExprKind::Let(ref pat, ref scrutinee) = cond.kind {
623 hir::Guard::IfLet(self.lower_pat(pat), self.lower_expr(scrutinee))
624 } else {
625 hir::Guard::If(self.lower_expr(cond))
626 }
627 });
628 let hir_id = self.next_id();
629 self.lower_attrs(hir_id, &arm.attrs);
630 hir::Arm { hir_id, pat, guard, body: self.lower_expr(&arm.body), span: arm.span }
631 }
632
    /// Lower an `async` construct to a generator that is then wrapped so it implements `Future`.
    ///
    /// This results in:
    ///
    /// ```text
    /// std::future::from_generator(static move? |_task_context| -> <ret_ty> {
    ///     <body>
    /// })
    /// ```
    ///
    /// `ret_ty` is the explicit return type of an `async fn`/closure if one
    /// was written (`None` means the default return type). `body` is a
    /// callback so the caller controls the scopes the body is lowered in.
    pub(super) fn make_async_expr(
        &mut self,
        capture_clause: CaptureBy,
        closure_node_id: NodeId,
        ret_ty: Option<AstP<Ty>>,
        span: Span,
        async_gen_kind: hir::AsyncGeneratorKind,
        body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
    ) -> hir::ExprKind<'hir> {
        let output = match ret_ty {
            Some(ty) => hir::FnRetTy::Return(self.lower_ty(&ty, ImplTraitContext::disallowed())),
            None => hir::FnRetTy::DefaultReturn(span),
        };

        // Resume argument type. We let the compiler infer this to simplify the lowering. It is
        // fully constrained by `future::from_generator`.
        let input_ty = hir::Ty { hir_id: self.next_id(), kind: hir::TyKind::Infer, span };

        // The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
        let decl = self.arena.alloc(hir::FnDecl {
            inputs: arena_vec![self; input_ty],
            output,
            c_variadic: false,
            implicit_self: hir::ImplicitSelfKind::None,
        });

        // Lower the argument pattern/ident. The ident is used again in the `.await` lowering.
        let (pat, task_context_hid) = self.pat_ident_binding_mode(
            span,
            Ident::with_dummy_span(sym::_task_context),
            hir::BindingAnnotation::Mutable,
        );
        let param = hir::Param { hir_id: self.next_id(), pat, ty_span: span, span };
        let params = arena_vec![self; param];

        let body_id = self.lower_body(move |this| {
            this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind));

            // Save and restore `task_context` around the body so nested async
            // constructs each see their own resume-argument binding.
            let old_ctx = this.task_context;
            this.task_context = Some(task_context_hid);
            let res = body(this);
            this.task_context = old_ctx;
            (params, res)
        });

        // `static |_task_context| -> <ret_ty> { body }`:
        let generator_kind = hir::ExprKind::Closure(
            capture_clause,
            decl,
            body_id,
            span,
            Some(hir::Movability::Static),
        );
        let generator =
            hir::Expr { hir_id: self.lower_node_id(closure_node_id), kind: generator_kind, span };

        // `future::from_generator`:
        let unstable_span =
            self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
        let gen_future =
            self.expr_lang_item_path(unstable_span, hir::LangItem::FromGenerator, ThinVec::new());

        // `future::from_generator(generator)`:
        hir::ExprKind::Call(self.arena.alloc(gen_future), arena_vec![self; generator])
    }
707
    /// Desugar `<expr>.await` into:
    /// ```rust
    /// match <expr> {
    ///     mut pinned => loop {
    ///         match unsafe { ::std::future::Future::poll(
    ///             <::std::pin::Pin>::new_unchecked(&mut pinned),
    ///             ::std::future::get_context(task_context),
    ///         ) } {
    ///             ::std::task::Poll::Ready(result) => break result,
    ///             ::std::task::Poll::Pending => {}
    ///         }
    ///         task_context = yield ();
    ///     }
    /// }
    /// ```
    fn lower_expr_await(&mut self, await_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
        // `.await` is only legal inside an async body; report the error but
        // keep lowering so later stages can still typecheck the expression.
        match self.generator_kind {
            Some(hir::GeneratorKind::Async(_)) => {}
            Some(hir::GeneratorKind::Gen) | None => {
                let mut err = struct_span_err!(
                    self.sess,
                    await_span,
                    E0728,
                    "`await` is only allowed inside `async` functions and blocks"
                );
                err.span_label(await_span, "only allowed inside `async` functions and blocks");
                if let Some(item_sp) = self.current_item {
                    err.span_label(item_sp, "this is not `async`");
                }
                err.emit();
            }
        }
        let span = self.mark_span_with_reason(DesugaringKind::Await, await_span, None);
        let gen_future_span = self.mark_span_with_reason(
            DesugaringKind::Await,
            await_span,
            self.allow_gen_future.clone(),
        );
        let expr = self.lower_expr(expr);

        let pinned_ident = Ident::with_dummy_span(sym::pinned);
        let (pinned_pat, pinned_pat_hid) =
            self.pat_ident_binding_mode(span, pinned_ident, hir::BindingAnnotation::Mutable);

        let task_context_ident = Ident::with_dummy_span(sym::_task_context);

        // unsafe {
        //     ::std::future::Future::poll(
        //         ::std::pin::Pin::new_unchecked(&mut pinned),
        //         ::std::future::get_context(task_context),
        //     )
        // }
        let poll_expr = {
            let pinned = self.expr_ident(span, pinned_ident, pinned_pat_hid);
            let ref_mut_pinned = self.expr_mut_addr_of(span, pinned);
            let task_context = if let Some(task_context_hid) = self.task_context {
                self.expr_ident_mut(span, task_context_ident, task_context_hid)
            } else {
                // Use of `await` outside of an async context, we cannot use `task_context` here.
                self.expr_err(span)
            };
            let new_unchecked = self.expr_call_lang_item_fn_mut(
                span,
                hir::LangItem::PinNewUnchecked,
                arena_vec![self; ref_mut_pinned],
            );
            let get_context = self.expr_call_lang_item_fn_mut(
                gen_future_span,
                hir::LangItem::GetContext,
                arena_vec![self; task_context],
            );
            let call = self.expr_call_lang_item_fn(
                span,
                hir::LangItem::FuturePoll,
                arena_vec![self; new_unchecked, get_context],
            );
            self.arena.alloc(self.expr_unsafe(call))
        };

        // `::std::task::Poll::Ready(result) => break result`
        let loop_node_id = self.resolver.next_node_id();
        let loop_hir_id = self.lower_node_id(loop_node_id);
        let ready_arm = {
            let x_ident = Ident::with_dummy_span(sym::result);
            let (x_pat, x_pat_hid) = self.pat_ident(span, x_ident);
            let x_expr = self.expr_ident(span, x_ident, x_pat_hid);
            let ready_field = self.single_pat_field(span, x_pat);
            let ready_pat = self.pat_lang_item_variant(span, hir::LangItem::PollReady, ready_field);
            // The `break` targets the desugaring's own loop, so it is lowered
            // in a fresh loop scope keyed by `loop_node_id`.
            let break_x = self.with_loop_scope(loop_node_id, move |this| {
                let expr_break =
                    hir::ExprKind::Break(this.lower_loop_destination(None), Some(x_expr));
                this.arena.alloc(this.expr(await_span, expr_break, ThinVec::new()))
            });
            self.arm(ready_pat, break_x)
        };

        // `::std::task::Poll::Pending => {}`
        let pending_arm = {
            let pending_pat = self.pat_lang_item_variant(span, hir::LangItem::PollPending, &[]);
            let empty_block = self.expr_block_empty(span);
            self.arm(pending_pat, empty_block)
        };

        let inner_match_stmt = {
            let match_expr = self.expr_match(
                span,
                poll_expr,
                arena_vec![self; ready_arm, pending_arm],
                hir::MatchSource::AwaitDesugar,
            );
            self.stmt_expr(span, match_expr)
        };

        // task_context = yield ();
        let yield_stmt = {
            let unit = self.expr_unit(span);
            let yield_expr = self.expr(
                span,
                hir::ExprKind::Yield(unit, hir::YieldSource::Await { expr: Some(expr.hir_id) }),
                ThinVec::new(),
            );
            let yield_expr = self.arena.alloc(yield_expr);

            if let Some(task_context_hid) = self.task_context {
                let lhs = self.expr_ident(span, task_context_ident, task_context_hid);
                let assign =
                    self.expr(span, hir::ExprKind::Assign(lhs, yield_expr, span), AttrVec::new());
                self.stmt_expr(span, assign)
            } else {
                // Use of `await` outside of an async context. Return `yield_expr` so that we can
                // proceed with type checking.
                self.stmt(span, hir::StmtKind::Semi(yield_expr))
            }
        };

        let loop_block = self.block_all(span, arena_vec![self; inner_match_stmt, yield_stmt], None);

        // loop { .. }
        let loop_expr = self.arena.alloc(hir::Expr {
            hir_id: loop_hir_id,
            kind: hir::ExprKind::Loop(loop_block, None, hir::LoopSource::Loop, span),
            span,
        });

        // mut pinned => loop { ... }
        let pinned_arm = self.arm(pinned_pat, loop_expr);

        // match <expr> {
        //     mut pinned => loop { .. }
        // }
        hir::ExprKind::Match(expr, arena_vec![self; pinned_arm], hir::MatchSource::AwaitDesugar)
    }
860
    /// Lowers a non-`async` closure expression. If lowering the body turns
    /// out to set `generator_kind` (the body contains `yield`), the closure
    /// is a generator and `movability` is validated by
    /// `generator_movability_for_fn`.
    fn lower_expr_closure(
        &mut self,
        capture_clause: CaptureBy,
        movability: Movability,
        decl: &FnDecl,
        body: &Expr,
        fn_decl_span: Span,
    ) -> hir::ExprKind<'hir> {
        let (body_id, generator_option) = self.with_new_scopes(move |this| {
            // Save/restore `current_item` so diagnostics inside the body
            // point at this closure.
            let prev = this.current_item;
            this.current_item = Some(fn_decl_span);
            let mut generator_kind = None;
            let body_id = this.lower_fn_body(decl, |this| {
                let e = this.lower_expr_mut(body);
                // Capture whether body lowering discovered a `yield`.
                generator_kind = this.generator_kind;
                e
            });
            let generator_option =
                this.generator_movability_for_fn(&decl, fn_decl_span, generator_kind, movability);
            this.current_item = prev;
            (body_id, generator_option)
        });

        // Lower outside new scope to preserve `is_in_loop_condition`.
        let fn_decl = self.lower_fn_decl(decl, None, false, None);

        hir::ExprKind::Closure(capture_clause, fn_decl, body_id, fn_decl_span, generator_option)
    }
889
890 fn generator_movability_for_fn(
891 &mut self,
892 decl: &FnDecl,
893 fn_decl_span: Span,
894 generator_kind: Option<hir::GeneratorKind>,
895 movability: Movability,
896 ) -> Option<hir::Movability> {
897 match generator_kind {
898 Some(hir::GeneratorKind::Gen) => {
899 if decl.inputs.len() > 1 {
900 struct_span_err!(
901 self.sess,
902 fn_decl_span,
903 E0628,
904 "too many parameters for a generator (expected 0 or 1 parameters)"
905 )
906 .emit();
907 }
908 Some(movability)
909 }
910 Some(hir::GeneratorKind::Async(_)) => {
911 panic!("non-`async` closure body turned `async` during lowering");
912 }
913 None => {
914 if movability == Movability::Static {
915 struct_span_err!(self.sess, fn_decl_span, E0697, "closures cannot be static")
916 .emit();
917 }
918 None
919 }
920 }
921 }
922
    /// Lowers an `async` closure: the outer closure keeps the written
    /// parameters but gets a defaulted return type, and its body becomes an
    /// async block (a `from_generator`-wrapped generator) carrying the
    /// written return type.
    fn lower_expr_async_closure(
        &mut self,
        capture_clause: CaptureBy,
        closure_id: NodeId,
        decl: &FnDecl,
        body: &Expr,
        fn_decl_span: Span,
    ) -> hir::ExprKind<'hir> {
        // The outer closure returns the generator, so its declared output is
        // the default; the written output type moves onto the async body.
        let outer_decl =
            FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };

        let body_id = self.with_new_scopes(|this| {
            // FIXME(cramertj): allow `async` non-`move` closures with arguments.
            if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
                struct_span_err!(
                    this.sess,
                    fn_decl_span,
                    E0708,
                    "`async` non-`move` closures with parameters are not currently supported",
                )
                .help(
                    "consider using `let` statements to manually capture \
                     variables by reference before entering an `async move` closure",
                )
                .emit();
            }

            // Transform `async |x: u8| -> X { ... }` into
            // `|x: u8| future_from_generator(|| -> X { ... })`.
            let body_id = this.lower_fn_body(&outer_decl, |this| {
                let async_ret_ty =
                    if let FnRetTy::Ty(ty) = &decl.output { Some(ty.clone()) } else { None };
                let async_body = this.make_async_expr(
                    capture_clause,
                    closure_id,
                    async_ret_ty,
                    body.span,
                    hir::AsyncGeneratorKind::Closure,
                    |this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
                );
                this.expr(fn_decl_span, async_body, ThinVec::new())
            });
            body_id
        });

        // We need to lower the declaration outside the new scope, because we
        // have to conserve the state of being inside a loop condition for the
        // closure argument types.
        let fn_decl = self.lower_fn_decl(&outer_decl, None, false, None);

        hir::ExprKind::Closure(capture_clause, fn_decl, body_id, fn_decl_span, None)
    }
975
    /// Destructure the LHS of complex assignments.
    /// For instance, lower `(a, b) = t` to `{ let (lhs1, lhs2) = t; a = lhs1; b = lhs2; }`.
    ///
    /// Ordinary assignments (where the LHS is a plain place expression) are
    /// lowered directly to `hir::ExprKind::Assign`; everything else goes
    /// through the destructuring desugar below.
    fn lower_expr_assign(
        &mut self,
        lhs: &Expr,
        rhs: &Expr,
        eq_sign_span: Span,
        whole_span: Span,
    ) -> hir::ExprKind<'hir> {
        // Return early in case of an ordinary assignment.
        fn is_ordinary(lower_ctx: &mut LoweringContext<'_, '_>, lhs: &Expr) -> bool {
            match &lhs.kind {
                ExprKind::Array(..)
                | ExprKind::Struct(..)
                | ExprKind::Tup(..)
                | ExprKind::Underscore => false,
                // Check for tuple struct constructor.
                ExprKind::Call(callee, ..) => lower_ctx.extract_tuple_struct_path(callee).is_none(),
                ExprKind::Paren(e) => {
                    match e.kind {
                        // We special-case `(..)` for consistency with patterns.
                        ExprKind::Range(None, None, RangeLimits::HalfOpen) => false,
                        _ => is_ordinary(lower_ctx, e),
                    }
                }
                _ => true,
            }
        }
        if is_ordinary(self, lhs) {
            return hir::ExprKind::Assign(self.lower_expr(lhs), self.lower_expr(rhs), eq_sign_span);
        }
        // Destructuring assignment is feature-gated; emit the error but keep
        // lowering so later passes still see well-formed HIR.
        if !self.sess.features_untracked().destructuring_assignment {
            feature_err(
                &self.sess.parse_sess,
                sym::destructuring_assignment,
                eq_sign_span,
                "destructuring assignments are unstable",
            )
            .span_label(lhs.span, "cannot assign to this expression")
            .emit();
        }

        // Sub-assignments (`a = lhs1; b = lhs2; ...`) collected while the LHS
        // is converted into a pattern below.
        let mut assignments = vec![];

        // The LHS becomes a pattern: `(lhs1, lhs2)`.
        let pat = self.destructure_assign(lhs, eq_sign_span, &mut assignments);
        let rhs = self.lower_expr(rhs);

        // Introduce a `let` for destructuring: `let (lhs1, lhs2) = t`.
        let destructure_let = self.stmt_let_pat(
            None,
            whole_span,
            Some(rhs),
            pat,
            hir::LocalSource::AssignDesugar(eq_sign_span),
        );

        // `a = lhs1; b = lhs2;`.
        let stmts = self
            .arena
            .alloc_from_iter(std::iter::once(destructure_let).chain(assignments.into_iter()));

        // Wrap everything in a block.
        hir::ExprKind::Block(&self.block_all(whole_span, stmts, None), None)
    }
1041
1042 /// If the given expression is a path to a tuple struct, returns that path.
1043 /// It is not a complete check, but just tries to reject most paths early
1044 /// if they are not tuple structs.
1045 /// Type checking will take care of the full validation later.
1046 fn extract_tuple_struct_path<'a>(&mut self, expr: &'a Expr) -> Option<&'a Path> {
1047 // For tuple struct destructuring, it must be a non-qualified path (like in patterns).
1048 if let ExprKind::Path(None, path) = &expr.kind {
1049 // Does the path resolves to something disallowed in a tuple struct/variant pattern?
1050 if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
1051 if partial_res.unresolved_segments() == 0
1052 && !partial_res.base_res().expected_in_tuple_struct_pat()
1053 {
1054 return None;
1055 }
1056 }
1057 return Some(path);
1058 }
1059 None
1060 }
1061
    /// Convert the LHS of a destructuring assignment to a pattern.
    /// Each sub-assignment is recorded in `assignments`.
    ///
    /// Destructurable expression forms (arrays, tuples, structs, tuple
    /// structs, `_`, parens) recurse; any other expression becomes a fresh
    /// binding in the pattern plus a deferred `lhs = binding` statement
    /// pushed onto `assignments`.
    fn destructure_assign(
        &mut self,
        lhs: &Expr,
        eq_sign_span: Span,
        assignments: &mut Vec<hir::Stmt<'hir>>,
    ) -> &'hir hir::Pat<'hir> {
        match &lhs.kind {
            // Underscore pattern.
            ExprKind::Underscore => {
                return self.pat_without_dbm(lhs.span, hir::PatKind::Wild);
            }
            // Slice patterns.
            ExprKind::Array(elements) => {
                let (pats, rest) =
                    self.destructure_sequence(elements, "slice", eq_sign_span, assignments);
                let slice_pat = if let Some((i, span)) = rest {
                    // `..` present: split the sub-patterns around its index.
                    let (before, after) = pats.split_at(i);
                    hir::PatKind::Slice(
                        before,
                        Some(self.pat_without_dbm(span, hir::PatKind::Wild)),
                        after,
                    )
                } else {
                    hir::PatKind::Slice(pats, None, &[])
                };
                return self.pat_without_dbm(lhs.span, slice_pat);
            }
            // Tuple structs.
            ExprKind::Call(callee, args) => {
                if let Some(path) = self.extract_tuple_struct_path(callee) {
                    let (pats, rest) = self.destructure_sequence(
                        args,
                        "tuple struct or variant",
                        eq_sign_span,
                        assignments,
                    );
                    let qpath = self.lower_qpath(
                        callee.id,
                        &None,
                        path,
                        ParamMode::Optional,
                        ImplTraitContext::disallowed(),
                    );
                    // Destructure like a tuple struct.
                    let tuple_struct_pat =
                        hir::PatKind::TupleStruct(qpath, pats, rest.map(|r| r.0));
                    return self.pat_without_dbm(lhs.span, tuple_struct_pat);
                }
                // Not a tuple-struct path: fall through to the lvalue case.
            }
            // Structs.
            ExprKind::Struct(se) => {
                let field_pats = self.arena.alloc_from_iter(se.fields.iter().map(|f| {
                    let pat = self.destructure_assign(&f.expr, eq_sign_span, assignments);
                    hir::PatField {
                        hir_id: self.next_id(),
                        ident: f.ident,
                        pat,
                        is_shorthand: f.is_shorthand,
                        span: f.span,
                    }
                }));
                let qpath = self.lower_qpath(
                    lhs.id,
                    &None,
                    &se.path,
                    ParamMode::Optional,
                    ImplTraitContext::disallowed(),
                );
                // Plain `..` means "fields omitted" and is fine; a
                // functional-update base (`..base`) is rejected.
                let fields_omitted = match &se.rest {
                    StructRest::Base(e) => {
                        self.sess
                            .struct_span_err(
                                e.span,
                                "functional record updates are not allowed in destructuring \
                                 assignments",
                            )
                            .span_suggestion(
                                e.span,
                                "consider removing the trailing pattern",
                                String::new(),
                                rustc_errors::Applicability::MachineApplicable,
                            )
                            .emit();
                        true
                    }
                    StructRest::Rest(_) => true,
                    StructRest::None => false,
                };
                let struct_pat = hir::PatKind::Struct(qpath, field_pats, fields_omitted);
                return self.pat_without_dbm(lhs.span, struct_pat);
            }
            // Tuples.
            ExprKind::Tup(elements) => {
                let (pats, rest) =
                    self.destructure_sequence(elements, "tuple", eq_sign_span, assignments);
                let tuple_pat = hir::PatKind::Tuple(pats, rest.map(|r| r.0));
                return self.pat_without_dbm(lhs.span, tuple_pat);
            }
            ExprKind::Paren(e) => {
                // We special-case `(..)` for consistency with patterns.
                if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
                    let tuple_pat = hir::PatKind::Tuple(&[], Some(0));
                    return self.pat_without_dbm(lhs.span, tuple_pat);
                } else {
                    return self.destructure_assign(e, eq_sign_span, assignments);
                }
            }
            _ => {}
        }
        // Treat all other cases as normal lvalue.
        let ident = Ident::new(sym::lhs, lhs.span);
        let (pat, binding) = self.pat_ident(lhs.span, ident);
        let ident = self.expr_ident(lhs.span, ident, binding);
        let assign = hir::ExprKind::Assign(self.lower_expr(lhs), ident, eq_sign_span);
        let expr = self.expr(lhs.span, assign, ThinVec::new());
        assignments.push(self.stmt_expr(lhs.span, expr));
        pat
    }
1182
    /// Destructure a sequence of expressions occurring on the LHS of an assignment.
    /// Such a sequence occurs in a tuple (struct)/slice.
    /// Return a sequence of corresponding patterns, and the index and the span of `..` if it
    /// exists.
    /// Each sub-assignment is recorded in `assignments`.
    ///
    /// `ctx` names the surrounding construct ("tuple", "slice", ...) and is
    /// only used for the duplicate-`..` diagnostic.
    fn destructure_sequence(
        &mut self,
        elements: &[AstP<Expr>],
        ctx: &str,
        eq_sign_span: Span,
        assignments: &mut Vec<hir::Stmt<'hir>>,
    ) -> (&'hir [&'hir hir::Pat<'hir>], Option<(usize, Span)>) {
        // Index and span of the (at most one) `..` element.
        let mut rest = None;
        let elements =
            self.arena.alloc_from_iter(elements.iter().enumerate().filter_map(|(i, e)| {
                // Check for `..` pattern.
                if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
                    if let Some((_, prev_span)) = rest {
                        // A second `..` is an error; only the first is kept.
                        self.ban_extra_rest_pat(e.span, prev_span, ctx);
                    } else {
                        rest = Some((i, e.span));
                    }
                    // `..` contributes no sub-pattern of its own.
                    None
                } else {
                    Some(self.destructure_assign(e, eq_sign_span, assignments))
                }
            }));
        (elements, rest)
    }
1212
1213 /// Desugar `<start>..=<end>` into `std::ops::RangeInclusive::new(<start>, <end>)`.
1214 fn lower_expr_range_closed(&mut self, span: Span, e1: &Expr, e2: &Expr) -> hir::ExprKind<'hir> {
1215 let e1 = self.lower_expr_mut(e1);
1216 let e2 = self.lower_expr_mut(e2);
1217 let fn_path = hir::QPath::LangItem(hir::LangItem::RangeInclusiveNew, span);
1218 let fn_expr =
1219 self.arena.alloc(self.expr(span, hir::ExprKind::Path(fn_path), ThinVec::new()));
1220 hir::ExprKind::Call(fn_expr, arena_vec![self; e1, e2])
1221 }
1222
1223 fn lower_expr_range(
1224 &mut self,
1225 span: Span,
1226 e1: Option<&Expr>,
1227 e2: Option<&Expr>,
1228 lims: RangeLimits,
1229 ) -> hir::ExprKind<'hir> {
1230 use rustc_ast::RangeLimits::*;
1231
1232 let lang_item = match (e1, e2, lims) {
1233 (None, None, HalfOpen) => hir::LangItem::RangeFull,
1234 (Some(..), None, HalfOpen) => hir::LangItem::RangeFrom,
1235 (None, Some(..), HalfOpen) => hir::LangItem::RangeTo,
1236 (Some(..), Some(..), HalfOpen) => hir::LangItem::Range,
1237 (None, Some(..), Closed) => hir::LangItem::RangeToInclusive,
1238 (Some(..), Some(..), Closed) => unreachable!(),
1239 (_, None, Closed) => {
1240 self.diagnostic().span_fatal(span, "inclusive range with no end").raise()
1241 }
1242 };
1243
1244 let fields = self.arena.alloc_from_iter(
1245 e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e))).map(|(s, e)| {
1246 let expr = self.lower_expr(&e);
1247 let ident = Ident::new(Symbol::intern(s), e.span);
1248 self.expr_field(ident, expr, e.span)
1249 }),
1250 );
1251
1252 hir::ExprKind::Struct(self.arena.alloc(hir::QPath::LangItem(lang_item, span)), fields, None)
1253 }
1254
1255 fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>) -> hir::Destination {
1256 let target_id = match destination {
1257 Some((id, _)) => {
1258 if let Some(loop_id) = self.resolver.get_label_res(id) {
1259 Ok(self.lower_node_id(loop_id))
1260 } else {
1261 Err(hir::LoopIdError::UnresolvedLabel)
1262 }
1263 }
1264 None => self
1265 .loop_scopes
1266 .last()
1267 .cloned()
1268 .map(|id| Ok(self.lower_node_id(id)))
1269 .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)),
1270 };
1271 hir::Destination { label: destination.map(|(_, label)| label), target_id }
1272 }
1273
1274 fn lower_jump_destination(&mut self, id: NodeId, opt_label: Option<Label>) -> hir::Destination {
1275 if self.is_in_loop_condition && opt_label.is_none() {
1276 hir::Destination {
1277 label: None,
1278 target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition),
1279 }
1280 } else {
1281 self.lower_loop_destination(opt_label.map(|label| (id, label)))
1282 }
1283 }
1284
1285 fn with_catch_scope<T>(&mut self, catch_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1286 let len = self.catch_scopes.len();
1287 self.catch_scopes.push(catch_id);
1288
1289 let result = f(self);
1290 assert_eq!(
1291 len + 1,
1292 self.catch_scopes.len(),
1293 "catch scopes should be added and removed in stack order"
1294 );
1295
1296 self.catch_scopes.pop().unwrap();
1297
1298 result
1299 }
1300
1301 fn with_loop_scope<T>(&mut self, loop_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1302 // We're no longer in the base loop's condition; we're in another loop.
1303 let was_in_loop_condition = self.is_in_loop_condition;
1304 self.is_in_loop_condition = false;
1305
1306 let len = self.loop_scopes.len();
1307 self.loop_scopes.push(loop_id);
1308
1309 let result = f(self);
1310 assert_eq!(
1311 len + 1,
1312 self.loop_scopes.len(),
1313 "loop scopes should be added and removed in stack order"
1314 );
1315
1316 self.loop_scopes.pop().unwrap();
1317
1318 self.is_in_loop_condition = was_in_loop_condition;
1319
1320 result
1321 }
1322
1323 fn with_loop_condition_scope<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
1324 let was_in_loop_condition = self.is_in_loop_condition;
1325 self.is_in_loop_condition = true;
1326
1327 let result = f(self);
1328
1329 self.is_in_loop_condition = was_in_loop_condition;
1330
1331 result
1332 }
1333
    /// Lower an `asm!` expression: parse and validate its register operands
    /// and template modifiers against the target, then build the HIR node.
    /// Errors are reported but lowering always produces valid HIR (using
    /// `Err` placeholder registers/classes) so later passes can proceed.
    fn lower_expr_asm(&mut self, sp: Span, asm: &InlineAsm) -> hir::ExprKind<'hir> {
        // Rustdoc needs to support asm! from foreign architectures: don't try
        // lowering the register constraints in this case.
        let asm_arch = if self.sess.opts.actually_rustdoc { None } else { self.sess.asm_arch };
        if asm_arch.is_none() && !self.sess.opts.actually_rustdoc {
            struct_span_err!(self.sess, sp, E0472, "asm! is unsupported on this target").emit();
        }
        // `att_syntax` only makes sense on x86 targets.
        if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
            && !matches!(asm_arch, Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64))
            && !self.sess.opts.actually_rustdoc
        {
            self.sess
                .struct_span_err(sp, "the `att_syntax` option is only supported on x86")
                .emit();
        }

        // Lower operands to HIR. We use dummy register classes if an error
        // occurs during lowering because we still need to be able to produce a
        // valid HIR.
        let sess = self.sess;
        let operands: Vec<_> = asm
            .operands
            .iter()
            .map(|(op, op_sp)| {
                // Parse an explicit register or register class name; on
                // failure, report the error and substitute `Err`.
                let lower_reg = |reg| match reg {
                    InlineAsmRegOrRegClass::Reg(s) => {
                        asm::InlineAsmRegOrRegClass::Reg(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmReg::parse(
                                asm_arch,
                                |feature| sess.target_features.contains(&Symbol::intern(feature)),
                                &sess.target,
                                s,
                            )
                            .unwrap_or_else(|e| {
                                let msg = format!("invalid register `{}`: {}", s.as_str(), e);
                                sess.struct_span_err(*op_sp, &msg).emit();
                                asm::InlineAsmReg::Err
                            })
                        } else {
                            asm::InlineAsmReg::Err
                        })
                    }
                    InlineAsmRegOrRegClass::RegClass(s) => {
                        asm::InlineAsmRegOrRegClass::RegClass(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmRegClass::parse(asm_arch, s).unwrap_or_else(|e| {
                                let msg = format!("invalid register class `{}`: {}", s.as_str(), e);
                                sess.struct_span_err(*op_sp, &msg).emit();
                                asm::InlineAsmRegClass::Err
                            })
                        } else {
                            asm::InlineAsmRegClass::Err
                        })
                    }
                };

                // Lower each operand's expression(s) into the HIR operand.
                let op = match *op {
                    InlineAsmOperand::In { reg, ref expr } => hir::InlineAsmOperand::In {
                        reg: lower_reg(reg),
                        expr: self.lower_expr_mut(expr),
                    },
                    InlineAsmOperand::Out { reg, late, ref expr } => hir::InlineAsmOperand::Out {
                        reg: lower_reg(reg),
                        late,
                        expr: expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
                    },
                    InlineAsmOperand::InOut { reg, late, ref expr } => {
                        hir::InlineAsmOperand::InOut {
                            reg: lower_reg(reg),
                            late,
                            expr: self.lower_expr_mut(expr),
                        }
                    }
                    InlineAsmOperand::SplitInOut { reg, late, ref in_expr, ref out_expr } => {
                        hir::InlineAsmOperand::SplitInOut {
                            reg: lower_reg(reg),
                            late,
                            in_expr: self.lower_expr_mut(in_expr),
                            out_expr: out_expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
                        }
                    }
                    InlineAsmOperand::Const { ref expr } => {
                        hir::InlineAsmOperand::Const { expr: self.lower_expr_mut(expr) }
                    }
                    InlineAsmOperand::Sym { ref expr } => {
                        hir::InlineAsmOperand::Sym { expr: self.lower_expr_mut(expr) }
                    }
                };
                (op, *op_sp)
            })
            .collect();

        // Validate template modifiers against the register classes for the operands
        for p in &asm.template {
            if let InlineAsmTemplatePiece::Placeholder {
                operand_idx,
                modifier: Some(modifier),
                span: placeholder_span,
            } = *p
            {
                let op_sp = asm.operands[operand_idx].1;
                match &operands[operand_idx].0 {
                    hir::InlineAsmOperand::In { reg, .. }
                    | hir::InlineAsmOperand::Out { reg, .. }
                    | hir::InlineAsmOperand::InOut { reg, .. }
                    | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
                        let class = reg.reg_class();
                        // An error was already reported for this operand.
                        if class == asm::InlineAsmRegClass::Err {
                            continue;
                        }
                        let valid_modifiers = class.valid_modifiers(asm_arch.unwrap());
                        if !valid_modifiers.contains(&modifier) {
                            let mut err = sess.struct_span_err(
                                placeholder_span,
                                "invalid asm template modifier for this register class",
                            );
                            err.span_label(placeholder_span, "template modifier");
                            err.span_label(op_sp, "argument");
                            if !valid_modifiers.is_empty() {
                                let mut mods = format!("`{}`", valid_modifiers[0]);
                                for m in &valid_modifiers[1..] {
                                    let _ = write!(mods, ", `{}`", m);
                                }
                                err.note(&format!(
                                    "the `{}` register class supports \
                                     the following template modifiers: {}",
                                    class.name(),
                                    mods
                                ));
                            } else {
                                err.note(&format!(
                                    "the `{}` register class does not support template modifiers",
                                    class.name()
                                ));
                            }
                            err.emit();
                        }
                    }
                    hir::InlineAsmOperand::Const { .. } => {
                        let mut err = sess.struct_span_err(
                            placeholder_span,
                            "asm template modifiers are not allowed for `const` arguments",
                        );
                        err.span_label(placeholder_span, "template modifier");
                        err.span_label(op_sp, "argument");
                        err.emit();
                    }
                    hir::InlineAsmOperand::Sym { .. } => {
                        let mut err = sess.struct_span_err(
                            placeholder_span,
                            "asm template modifiers are not allowed for `sym` arguments",
                        );
                        err.span_label(placeholder_span, "template modifier");
                        err.span_label(op_sp, "argument");
                        err.emit();
                    }
                }
            }
        }

        // Maps each concrete register to the operand index that claimed it,
        // separately for inputs and outputs (late outputs may share with inputs).
        let mut used_input_regs = FxHashMap::default();
        let mut used_output_regs = FxHashMap::default();
        let mut required_features: Vec<&str> = vec![];
        for (idx, &(ref op, op_sp)) in operands.iter().enumerate() {
            if let Some(reg) = op.reg() {
                // Make sure we don't accidentally carry features from the
                // previous iteration.
                required_features.clear();

                // Validate register classes against currently enabled target
                // features. We check that at least one type is available for
                // the current target.
                let reg_class = reg.reg_class();
                if reg_class == asm::InlineAsmRegClass::Err {
                    continue;
                }
                for &(_, feature) in reg_class.supported_types(asm_arch.unwrap()) {
                    if let Some(feature) = feature {
                        if self.sess.target_features.contains(&Symbol::intern(feature)) {
                            required_features.clear();
                            break;
                        } else {
                            required_features.push(feature);
                        }
                    } else {
                        // Class has a type usable without any target feature.
                        required_features.clear();
                        break;
                    }
                }
                // We are sorting primitive strs here and can use unstable sort here
                required_features.sort_unstable();
                required_features.dedup();
                match &required_features[..] {
                    [] => {}
                    [feature] => {
                        let msg = format!(
                            "register class `{}` requires the `{}` target feature",
                            reg_class.name(),
                            feature
                        );
                        sess.struct_span_err(op_sp, &msg).emit();
                    }
                    features => {
                        let msg = format!(
                            "register class `{}` requires at least one target feature: {}",
                            reg_class.name(),
                            features.join(", ")
                        );
                        sess.struct_span_err(op_sp, &msg).emit();
                    }
                }

                // Check for conflicts between explicit register operands.
                if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
                    let (input, output) = match op {
                        hir::InlineAsmOperand::In { .. } => (true, false),
                        // Late output do not conflict with inputs, but normal outputs do
                        hir::InlineAsmOperand::Out { late, .. } => (!late, true),
                        hir::InlineAsmOperand::InOut { .. }
                        | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),
                        hir::InlineAsmOperand::Const { .. } | hir::InlineAsmOperand::Sym { .. } => {
                            // `op.reg()` returned `Some` above, so these
                            // register-less operands cannot occur here.
                            unreachable!()
                        }
                    };

                    // Flag to output the error only once per operand
                    let mut skip = false;
                    reg.overlapping_regs(|r| {
                        let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
                                         input| {
                            match used_regs.entry(r) {
                                Entry::Occupied(o) => {
                                    if skip {
                                        return;
                                    }
                                    skip = true;

                                    let idx2 = *o.get();
                                    let &(ref op2, op_sp2) = &operands[idx2];
                                    let reg2 = match op2.reg() {
                                        Some(asm::InlineAsmRegOrRegClass::Reg(r)) => r,
                                        _ => unreachable!(),
                                    };

                                    let msg = format!(
                                        "register `{}` conflicts with register `{}`",
                                        reg.name(),
                                        reg2.name()
                                    );
                                    let mut err = sess.struct_span_err(op_sp, &msg);
                                    err.span_label(op_sp, &format!("register `{}`", reg.name()));
                                    err.span_label(op_sp2, &format!("register `{}`", reg2.name()));

                                    // Suggest `lateout` for a plain in/out clash.
                                    match (op, op2) {
                                        (
                                            hir::InlineAsmOperand::In { .. },
                                            hir::InlineAsmOperand::Out { late, .. },
                                        )
                                        | (
                                            hir::InlineAsmOperand::Out { late, .. },
                                            hir::InlineAsmOperand::In { .. },
                                        ) => {
                                            assert!(!*late);
                                            let out_op_sp = if input { op_sp2 } else { op_sp };
                                            let msg = "use `lateout` instead of \
                                                       `out` to avoid conflict";
                                            err.span_help(out_op_sp, msg);
                                        }
                                        _ => {}
                                    }

                                    err.emit();
                                }
                                Entry::Vacant(v) => {
                                    v.insert(idx);
                                }
                            }
                        };
                        if input {
                            check(&mut used_input_regs, true);
                        }
                        if output {
                            check(&mut used_output_regs, false);
                        }
                    });
                }
            }
        }

        // Move everything into the arena and assemble the HIR node.
        let operands = self.arena.alloc_from_iter(operands);
        let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
        let line_spans = self.arena.alloc_slice(&asm.line_spans[..]);
        let hir_asm = hir::InlineAsm { template, operands, options: asm.options, line_spans };
        hir::ExprKind::InlineAsm(self.arena.alloc(hir_asm))
    }
1628
1629 fn lower_expr_llvm_asm(&mut self, asm: &LlvmInlineAsm) -> hir::ExprKind<'hir> {
1630 let inner = hir::LlvmInlineAsmInner {
1631 inputs: asm.inputs.iter().map(|&(c, _)| c).collect(),
1632 outputs: asm
1633 .outputs
1634 .iter()
1635 .map(|out| hir::LlvmInlineAsmOutput {
1636 constraint: out.constraint,
1637 is_rw: out.is_rw,
1638 is_indirect: out.is_indirect,
1639 span: out.expr.span,
1640 })
1641 .collect(),
1642 asm: asm.asm,
1643 asm_str_style: asm.asm_str_style,
1644 clobbers: asm.clobbers.clone(),
1645 volatile: asm.volatile,
1646 alignstack: asm.alignstack,
1647 dialect: asm.dialect,
1648 };
1649 let hir_asm = hir::LlvmInlineAsm {
1650 inner,
1651 inputs_exprs: self.arena.alloc_from_iter(
1652 asm.inputs.iter().map(|&(_, ref input)| self.lower_expr_mut(input)),
1653 ),
1654 outputs_exprs: self
1655 .arena
1656 .alloc_from_iter(asm.outputs.iter().map(|out| self.lower_expr_mut(&out.expr))),
1657 };
1658 hir::ExprKind::LlvmInlineAsm(self.arena.alloc(hir_asm))
1659 }
1660
1661 fn lower_expr_field(&mut self, f: &ExprField) -> hir::ExprField<'hir> {
1662 hir::ExprField {
1663 hir_id: self.next_id(),
1664 ident: f.ident,
1665 expr: self.lower_expr(&f.expr),
1666 span: f.span,
1667 is_shorthand: f.is_shorthand,
1668 }
1669 }
1670
1671 fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
1672 match self.generator_kind {
1673 Some(hir::GeneratorKind::Gen) => {}
1674 Some(hir::GeneratorKind::Async(_)) => {
1675 struct_span_err!(
1676 self.sess,
1677 span,
1678 E0727,
1679 "`async` generators are not yet supported"
1680 )
1681 .emit();
1682 }
1683 None => self.generator_kind = Some(hir::GeneratorKind::Gen),
1684 }
1685
1686 let expr =
1687 opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span));
1688
1689 hir::ExprKind::Yield(expr, hir::YieldSource::Yield)
1690 }
1691
    /// Desugar `ExprForLoop` from: `[opt_ident]: for <pat> in <head> <body>` into:
    /// ```rust
    /// {
    ///     let result = match ::std::iter::IntoIterator::into_iter(<head>) {
    ///         mut iter => {
    ///             [opt_ident]: loop {
    ///                 let mut __next;
    ///                 match ::std::iter::Iterator::next(&mut iter) {
    ///                     ::std::option::Option::Some(val) => __next = val,
    ///                     ::std::option::Option::None => break
    ///                 };
    ///                 let <pat> = __next;
    ///                 StmtKind::Expr(<body>);
    ///             }
    ///         }
    ///     };
    ///     result
    /// }
    /// ```
    fn lower_expr_for(
        &mut self,
        e: &Expr,
        pat: &Pat,
        head: &Expr,
        body: &Block,
        opt_label: Option<Label>,
    ) -> hir::Expr<'hir> {
        let orig_head_span = head.span;
        // expand <head>
        let mut head = self.lower_expr_mut(head);
        // Mark the head's span as for-loop desugaring so diagnostics can
        // point at the original expression.
        let desugared_span = self.mark_span_with_reason(
            DesugaringKind::ForLoop(ForLoopLoc::Head),
            orig_head_span,
            None,
        );
        head.span = desugared_span;

        let iter = Ident::with_dummy_span(sym::iter);

        // `mut __next` — assigned by the `Some` arm on each iteration.
        let next_ident = Ident::with_dummy_span(sym::__next);
        let (next_pat, next_pat_hid) = self.pat_ident_binding_mode(
            desugared_span,
            next_ident,
            hir::BindingAnnotation::Mutable,
        );

        // `::std::option::Option::Some(val) => __next = val`
        let pat_arm = {
            let val_ident = Ident::with_dummy_span(sym::val);
            let (val_pat, val_pat_hid) = self.pat_ident(pat.span, val_ident);
            let val_expr = self.expr_ident(pat.span, val_ident, val_pat_hid);
            let next_expr = self.expr_ident(pat.span, next_ident, next_pat_hid);
            let assign = self.arena.alloc(self.expr(
                pat.span,
                hir::ExprKind::Assign(next_expr, val_expr, pat.span),
                ThinVec::new(),
            ));
            let some_pat = self.pat_some(pat.span, val_pat);
            self.arm(some_pat, assign)
        };

        // `::std::option::Option::None => break`
        let break_arm = {
            let break_expr =
                self.with_loop_scope(e.id, |this| this.expr_break(e.span, ThinVec::new()));
            let pat = self.pat_none(e.span);
            self.arm(pat, break_expr)
        };

        // `mut iter`
        let (iter_pat, iter_pat_nid) =
            self.pat_ident_binding_mode(desugared_span, iter, hir::BindingAnnotation::Mutable);

        // `match ::std::iter::Iterator::next(&mut iter) { ... }`
        let match_expr = {
            let iter = self.expr_ident(desugared_span, iter, iter_pat_nid);
            let ref_mut_iter = self.expr_mut_addr_of(desugared_span, iter);
            let next_expr = self.expr_call_lang_item_fn(
                desugared_span,
                hir::LangItem::IteratorNext,
                arena_vec![self; ref_mut_iter],
            );
            let arms = arena_vec![self; pat_arm, break_arm];

            self.expr_match(desugared_span, next_expr, arms, hir::MatchSource::ForLoopDesugar)
        };
        let match_stmt = self.stmt_expr(desugared_span, match_expr);

        let next_expr = self.expr_ident(desugared_span, next_ident, next_pat_hid);

        // `let mut __next`
        let next_let = self.stmt_let_pat(
            None,
            desugared_span,
            None,
            next_pat,
            hir::LocalSource::ForLoopDesugar,
        );

        // `let <pat> = __next`
        let pat = self.lower_pat(pat);
        let pat_let = self.stmt_let_pat(
            None,
            desugared_span,
            Some(next_expr),
            pat,
            hir::LocalSource::ForLoopDesugar,
        );

        let body_block = self.with_loop_scope(e.id, |this| this.lower_block(body, false));
        let body_expr = self.expr_block(body_block, ThinVec::new());
        let body_stmt = self.stmt_expr(body.span, body_expr);

        let loop_block = self.block_all(
            e.span,
            arena_vec![self; next_let, match_stmt, pat_let, body_stmt],
            None,
        );

        // `[opt_ident]: loop { ... }`
        let kind = hir::ExprKind::Loop(
            loop_block,
            opt_label,
            hir::LoopSource::ForLoop,
            e.span.with_hi(orig_head_span.hi()),
        );
        let loop_expr =
            self.arena.alloc(hir::Expr { hir_id: self.lower_node_id(e.id), kind, span: e.span });

        // `mut iter => { ... }`
        let iter_arm = self.arm(iter_pat, loop_expr);

        let into_iter_span = self.mark_span_with_reason(
            DesugaringKind::ForLoop(ForLoopLoc::IntoIter),
            orig_head_span,
            None,
        );

        // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
        let into_iter_expr = {
            self.expr_call_lang_item_fn(
                into_iter_span,
                hir::LangItem::IntoIterIntoIter,
                arena_vec![self; head],
            )
        };

        let match_expr = self.arena.alloc(self.expr_match(
            desugared_span,
            into_iter_expr,
            arena_vec![self; iter_arm],
            hir::MatchSource::ForLoopDesugar,
        ));

        let attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();

        // This is effectively `{ let _result = ...; _result }`.
        // The construct was introduced in #21984 and is necessary to make sure that
        // temporaries in the `head` expression are dropped and do not leak to the
        // surrounding scope of the `match` since the `match` is not a terminating scope.
        //
        // Also, add the attributes to the outer returned expr node.
        self.expr_drop_temps_mut(desugared_span, match_expr, attrs.into())
    }
1856
    /// Desugar `ExprKind::Try` from: `<expr>?` into:
    /// ```rust
    /// match Try::into_result(<expr>) {
    ///     Ok(val) => #[allow(unreachable_code)] val,
    ///     Err(err) => #[allow(unreachable_code)]
    ///                 // If there is an enclosing `try {...}`:
    ///                 break 'catch_target Try::from_error(From::from(err)),
    ///                 // Otherwise:
    ///                 return Try::from_error(From::from(err)),
    /// }
    /// ```
    fn lower_expr_try(&mut self, span: Span, sub_expr: &Expr) -> hir::ExprKind<'hir> {
        let unstable_span = self.mark_span_with_reason(
            DesugaringKind::QuestionMark,
            span,
            self.allow_try_trait.clone(),
        );
        // `try_span` covers just the trailing `?` token.
        let try_span = self.sess.source_map().end_point(span);
        let try_span = self.mark_span_with_reason(
            DesugaringKind::QuestionMark,
            try_span,
            self.allow_try_trait.clone(),
        );

        // `Try::into_result(<expr>)`
        let scrutinee = {
            // expand <expr>
            let sub_expr = self.lower_expr_mut(sub_expr);

            self.expr_call_lang_item_fn(
                unstable_span,
                hir::LangItem::TryIntoResult,
                arena_vec![self; sub_expr],
            )
        };

        // `#[allow(unreachable_code)]`
        let attr = {
            // `allow(unreachable_code)`
            let allow = {
                let allow_ident = Ident::new(sym::allow, span);
                let uc_ident = Ident::new(sym::unreachable_code, span);
                let uc_nested = attr::mk_nested_word_item(uc_ident);
                attr::mk_list_item(allow_ident, vec![uc_nested])
            };
            attr::mk_attr_outer(allow)
        };
        let attrs = vec![attr];

        // `Ok(val) => #[allow(unreachable_code)] val,`
        let ok_arm = {
            let val_ident = Ident::with_dummy_span(sym::val);
            let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
            let val_expr = self.arena.alloc(self.expr_ident_with_attrs(
                span,
                val_ident,
                val_pat_nid,
                ThinVec::from(attrs.clone()),
            ));
            let ok_pat = self.pat_ok(span, val_pat);
            self.arm(ok_pat, val_expr)
        };

        // `Err(err) => #[allow(unreachable_code)]
        //              return Try::from_error(From::from(err)),`
        let err_arm = {
            let err_ident = Ident::with_dummy_span(sym::err);
            let (err_local, err_local_nid) = self.pat_ident(try_span, err_ident);
            let from_expr = {
                let err_expr = self.expr_ident_mut(try_span, err_ident, err_local_nid);
                self.expr_call_lang_item_fn(
                    try_span,
                    hir::LangItem::FromFrom,
                    arena_vec![self; err_expr],
                )
            };
            let from_err_expr = self.wrap_in_try_constructor(
                hir::LangItem::TryFromError,
                unstable_span,
                from_expr,
                unstable_span,
            );
            let thin_attrs = ThinVec::from(attrs);
            let catch_scope = self.catch_scopes.last().copied();
            // Inside a `try` block, the error breaks to that block;
            // otherwise it returns from the enclosing function.
            let ret_expr = if let Some(catch_node) = catch_scope {
                let target_id = Ok(self.lower_node_id(catch_node));
                self.arena.alloc(self.expr(
                    try_span,
                    hir::ExprKind::Break(
                        hir::Destination { label: None, target_id },
                        Some(from_err_expr),
                    ),
                    thin_attrs,
                ))
            } else {
                self.arena.alloc(self.expr(
                    try_span,
                    hir::ExprKind::Ret(Some(from_err_expr)),
                    thin_attrs,
                ))
            };

            let err_pat = self.pat_err(try_span, err_local);
            self.arm(err_pat, ret_expr)
        };

        hir::ExprKind::Match(
            scrutinee,
            arena_vec![self; err_arm, ok_arm],
            hir::MatchSource::TryDesugar,
        )
    }
1969
1970 // =========================================================================
1971 // Helper methods for building HIR.
1972 // =========================================================================
1973
1974 /// Constructs a `true` or `false` literal expression.
1975 pub(super) fn expr_bool(&mut self, span: Span, val: bool) -> &'hir hir::Expr<'hir> {
1976 let lit = Spanned { span, node: LitKind::Bool(val) };
1977 self.arena.alloc(self.expr(span, hir::ExprKind::Lit(lit), ThinVec::new()))
1978 }
1979
1980 /// Wrap the given `expr` in a terminating scope using `hir::ExprKind::DropTemps`.
1981 ///
1982 /// In terms of drop order, it has the same effect as wrapping `expr` in
1983 /// `{ let _t = $expr; _t }` but should provide better compile-time performance.
1984 ///
1985 /// The drop order can be important in e.g. `if expr { .. }`.
1986 pub(super) fn expr_drop_temps(
1987 &mut self,
1988 span: Span,
1989 expr: &'hir hir::Expr<'hir>,
1990 attrs: AttrVec,
1991 ) -> &'hir hir::Expr<'hir> {
1992 self.arena.alloc(self.expr_drop_temps_mut(span, expr, attrs))
1993 }
1994
1995 pub(super) fn expr_drop_temps_mut(
1996 &mut self,
1997 span: Span,
1998 expr: &'hir hir::Expr<'hir>,
1999 attrs: AttrVec,
2000 ) -> hir::Expr<'hir> {
2001 self.expr(span, hir::ExprKind::DropTemps(expr), attrs)
2002 }
2003
2004 fn expr_match(
2005 &mut self,
2006 span: Span,
2007 arg: &'hir hir::Expr<'hir>,
2008 arms: &'hir [hir::Arm<'hir>],
2009 source: hir::MatchSource,
2010 ) -> hir::Expr<'hir> {
2011 self.expr(span, hir::ExprKind::Match(arg, arms, source), ThinVec::new())
2012 }
2013
2014 fn expr_break(&mut self, span: Span, attrs: AttrVec) -> &'hir hir::Expr<'hir> {
2015 let expr_break = hir::ExprKind::Break(self.lower_loop_destination(None), None);
2016 self.arena.alloc(self.expr(span, expr_break, attrs))
2017 }
2018
2019 fn expr_mut_addr_of(&mut self, span: Span, e: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
2020 self.expr(
2021 span,
2022 hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e),
2023 ThinVec::new(),
2024 )
2025 }
2026
2027 fn expr_unit(&mut self, sp: Span) -> &'hir hir::Expr<'hir> {
2028 self.arena.alloc(self.expr(sp, hir::ExprKind::Tup(&[]), ThinVec::new()))
2029 }
2030
2031 fn expr_call_mut(
2032 &mut self,
2033 span: Span,
2034 e: &'hir hir::Expr<'hir>,
2035 args: &'hir [hir::Expr<'hir>],
2036 ) -> hir::Expr<'hir> {
2037 self.expr(span, hir::ExprKind::Call(e, args), ThinVec::new())
2038 }
2039
2040 fn expr_call(
2041 &mut self,
2042 span: Span,
2043 e: &'hir hir::Expr<'hir>,
2044 args: &'hir [hir::Expr<'hir>],
2045 ) -> &'hir hir::Expr<'hir> {
2046 self.arena.alloc(self.expr_call_mut(span, e, args))
2047 }
2048
2049 fn expr_call_lang_item_fn_mut(
2050 &mut self,
2051 span: Span,
2052 lang_item: hir::LangItem,
2053 args: &'hir [hir::Expr<'hir>],
2054 ) -> hir::Expr<'hir> {
2055 let path = self.arena.alloc(self.expr_lang_item_path(span, lang_item, ThinVec::new()));
2056 self.expr_call_mut(span, path, args)
2057 }
2058
2059 fn expr_call_lang_item_fn(
2060 &mut self,
2061 span: Span,
2062 lang_item: hir::LangItem,
2063 args: &'hir [hir::Expr<'hir>],
2064 ) -> &'hir hir::Expr<'hir> {
2065 self.arena.alloc(self.expr_call_lang_item_fn_mut(span, lang_item, args))
2066 }
2067
2068 fn expr_lang_item_path(
2069 &mut self,
2070 span: Span,
2071 lang_item: hir::LangItem,
2072 attrs: AttrVec,
2073 ) -> hir::Expr<'hir> {
2074 self.expr(span, hir::ExprKind::Path(hir::QPath::LangItem(lang_item, span)), attrs)
2075 }
2076
2077 pub(super) fn expr_ident(
2078 &mut self,
2079 sp: Span,
2080 ident: Ident,
2081 binding: hir::HirId,
2082 ) -> &'hir hir::Expr<'hir> {
2083 self.arena.alloc(self.expr_ident_mut(sp, ident, binding))
2084 }
2085
2086 pub(super) fn expr_ident_mut(
2087 &mut self,
2088 sp: Span,
2089 ident: Ident,
2090 binding: hir::HirId,
2091 ) -> hir::Expr<'hir> {
2092 self.expr_ident_with_attrs(sp, ident, binding, ThinVec::new())
2093 }
2094
2095 fn expr_ident_with_attrs(
2096 &mut self,
2097 span: Span,
2098 ident: Ident,
2099 binding: hir::HirId,
2100 attrs: AttrVec,
2101 ) -> hir::Expr<'hir> {
2102 let expr_path = hir::ExprKind::Path(hir::QPath::Resolved(
2103 None,
2104 self.arena.alloc(hir::Path {
2105 span,
2106 res: Res::Local(binding),
2107 segments: arena_vec![self; hir::PathSegment::from_ident(ident)],
2108 }),
2109 ));
2110
2111 self.expr(span, expr_path, attrs)
2112 }
2113
2114 fn expr_unsafe(&mut self, expr: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
2115 let hir_id = self.next_id();
2116 let span = expr.span;
2117 self.expr(
2118 span,
2119 hir::ExprKind::Block(
2120 self.arena.alloc(hir::Block {
2121 stmts: &[],
2122 expr: Some(expr),
2123 hir_id,
2124 rules: hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated),
2125 span,
2126 targeted_by_break: false,
2127 }),
2128 None,
2129 ),
2130 ThinVec::new(),
2131 )
2132 }
2133
2134 fn expr_block_empty(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
2135 let blk = self.block_all(span, &[], None);
2136 let expr = self.expr_block(blk, ThinVec::new());
2137 self.arena.alloc(expr)
2138 }
2139
2140 pub(super) fn expr_block(
2141 &mut self,
2142 b: &'hir hir::Block<'hir>,
2143 attrs: AttrVec,
2144 ) -> hir::Expr<'hir> {
2145 self.expr(b.span, hir::ExprKind::Block(b, None), attrs)
2146 }
2147
2148 pub(super) fn expr(
2149 &mut self,
2150 span: Span,
2151 kind: hir::ExprKind<'hir>,
2152 attrs: AttrVec,
2153 ) -> hir::Expr<'hir> {
2154 let hir_id = self.next_id();
2155 self.lower_attrs(hir_id, &attrs);
2156 hir::Expr { hir_id, kind, span }
2157 }
2158
2159 fn expr_field(
2160 &mut self,
2161 ident: Ident,
2162 expr: &'hir hir::Expr<'hir>,
2163 span: Span,
2164 ) -> hir::ExprField<'hir> {
2165 hir::ExprField { hir_id: self.next_id(), ident, span, expr, is_shorthand: false }
2166 }
2167
2168 fn arm(&mut self, pat: &'hir hir::Pat<'hir>, expr: &'hir hir::Expr<'hir>) -> hir::Arm<'hir> {
2169 hir::Arm { hir_id: self.next_id(), pat, guard: None, span: expr.span, body: expr }
2170 }
2171 }