compiler/rustc_mir_build/src/thir/cx/expr.rs (rustc 1.53.0 source)
1 use crate::thir::cx::Cx;
2 use crate::thir::util::UserAnnotatedTyHelpers;
3 use crate::thir::*;
4 use rustc_data_structures::stack::ensure_sufficient_stack;
5 use rustc_hir as hir;
6 use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
7 use rustc_index::vec::Idx;
8 use rustc_middle::hir::place::Place as HirPlace;
9 use rustc_middle::hir::place::PlaceBase as HirPlaceBase;
10 use rustc_middle::hir::place::ProjectionKind as HirProjectionKind;
11 use rustc_middle::mir::interpret::Scalar;
12 use rustc_middle::mir::BorrowKind;
13 use rustc_middle::ty::adjustment::{
14 Adjust, Adjustment, AutoBorrow, AutoBorrowMutability, PointerCast,
15 };
16 use rustc_middle::ty::subst::{InternalSubsts, SubstsRef};
17 use rustc_middle::ty::{self, AdtKind, Ty};
18 use rustc_span::Span;
19
20 use std::iter;
21
22 impl<'thir, 'tcx> Cx<'thir, 'tcx> {
23 /// Mirrors and allocates a single [`hir::Expr`]. If you need to mirror a whole slice
24 /// of expressions, prefer using [`mirror_exprs`].
25 ///
26 /// [`mirror_exprs`]: Self::mirror_exprs
27 crate fn mirror_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) -> &'thir Expr<'thir, 'tcx> {
28 // `mirror_expr` is recursing very deep. Make sure the stack doesn't overflow.
29 ensure_sufficient_stack(|| self.arena.alloc(self.mirror_expr_inner(expr)))
30 }
31
32 /// Mirrors and allocates a slice of [`hir::Expr`]s. They will be allocated as a
33 /// contiguous sequence in memory.
34 crate fn mirror_exprs(&mut self, exprs: &'tcx [hir::Expr<'tcx>]) -> &'thir [Expr<'thir, 'tcx>] {
35 self.arena.alloc_from_iter(exprs.iter().map(|expr| self.mirror_expr_inner(expr)))
36 }
37
38 /// Mirrors a [`hir::Expr`] without allocating it into the arena.
39 /// This is a separate, private function so that [`mirror_expr`] and [`mirror_exprs`] can
40 /// decide how to allocate this expression (alone or within a slice).
41 ///
42 /// [`mirror_expr`]: Self::mirror_expr
43 /// [`mirror_exprs`]: Self::mirror_exprs
44 pub(super) fn mirror_expr_inner(
45 &mut self,
46 hir_expr: &'tcx hir::Expr<'tcx>,
47 ) -> Expr<'thir, 'tcx> {
48 let temp_lifetime = self.region_scope_tree.temporary_scope(hir_expr.hir_id.local_id);
49 let expr_scope =
50 region::Scope { id: hir_expr.hir_id.local_id, data: region::ScopeData::Node };
51
52 debug!("Expr::make_mirror(): id={}, span={:?}", hir_expr.hir_id, hir_expr.span);
53
54 let mut expr = self.make_mirror_unadjusted(hir_expr);
55
56 // Now apply adjustments, if any.
57 for adjustment in self.typeck_results.expr_adjustments(hir_expr) {
58 debug!("make_mirror: expr={:?} applying adjustment={:?}", expr, adjustment);
59 expr = self.apply_adjustment(hir_expr, expr, adjustment);
60 }
61
62 // Next, wrap this up in the expr's scope.
63 expr = Expr {
64 temp_lifetime,
65 ty: expr.ty,
66 span: hir_expr.span,
67 kind: ExprKind::Scope {
68 region_scope: expr_scope,
69 value: self.arena.alloc(expr),
70 lint_level: LintLevel::Explicit(hir_expr.hir_id),
71 },
72 };
73
74 // Finally, create a destruction scope, if any.
75 if let Some(region_scope) =
76 self.region_scope_tree.opt_destruction_scope(hir_expr.hir_id.local_id)
77 {
78 expr = Expr {
79 temp_lifetime,
80 ty: expr.ty,
81 span: hir_expr.span,
82 kind: ExprKind::Scope {
83 region_scope,
84 value: self.arena.alloc(expr),
85 lint_level: LintLevel::Inherited,
86 },
87 };
88 }
89
90 // OK, all done!
91 expr
92 }
93
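    /// Wraps `expr` in a single THIR node corresponding to one typeck adjustment
    /// (built-in or overloaded deref, autoref, raw-pointer reborrow, unsizing or
    /// other pointer cast, or never-to-any), giving the result the adjustment's
    /// target type.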
94 fn apply_adjustment(
95 &mut self,
96 hir_expr: &'tcx hir::Expr<'tcx>,
97 mut expr: Expr<'thir, 'tcx>,
98 adjustment: &Adjustment<'tcx>,
99 ) -> Expr<'thir, 'tcx> {
100 let Expr { temp_lifetime, mut span, .. } = expr;
101
102 // Adjust the span from the block, to the last expression of the
103 // block. This is a better span when returning a mutable reference
104 // with too short a lifetime. The error message will use the span
105 // from the assignment to the return place, which should only point
106 // at the returned value, not the entire function body.
107 //
108 // fn return_short_lived<'a>(x: &'a mut i32) -> &'static mut i32 {
109 // x
110 // // ^ error message points at this expression.
111 // }
112 let mut adjust_span = |expr: &mut Expr<'thir, 'tcx>| {
113 if let ExprKind::Block { body } = &expr.kind {
114 if let Some(ref last_expr) = body.expr {
115 span = last_expr.span;
116 expr.span = span;
117 }
118 }
119 };
120
121 let kind = match adjustment.kind {
122 Adjust::Pointer(PointerCast::Unsize) => {
123 adjust_span(&mut expr);
124 ExprKind::Pointer { cast: PointerCast::Unsize, source: self.arena.alloc(expr) }
125 }
126 Adjust::Pointer(cast) => ExprKind::Pointer { cast, source: self.arena.alloc(expr) },
127 Adjust::NeverToAny => ExprKind::NeverToAny { source: self.arena.alloc(expr) },
128 Adjust::Deref(None) => {
129 adjust_span(&mut expr);
130 ExprKind::Deref { arg: self.arena.alloc(expr) }
131 }
132 Adjust::Deref(Some(deref)) => {
133                 // We don't need to call adjust_span here, since
134 // deref coercions always start with a built-in deref.
135 let call = deref.method_call(self.tcx(), expr.ty);
136
137 expr = Expr {
138 temp_lifetime,
139 ty: self
140 .tcx
141 .mk_ref(deref.region, ty::TypeAndMut { ty: expr.ty, mutbl: deref.mutbl }),
142 span,
143 kind: ExprKind::Borrow {
144 borrow_kind: deref.mutbl.to_borrow_kind(),
145 arg: self.arena.alloc(expr),
146 },
147 };
148
149 self.overloaded_place(
150 hir_expr,
151 adjustment.target,
152 Some(call),
153 self.arena.alloc_from_iter(iter::once(expr)),
154 deref.span,
155 )
156 }
157 Adjust::Borrow(AutoBorrow::Ref(_, m)) => {
158 ExprKind::Borrow { borrow_kind: m.to_borrow_kind(), arg: self.arena.alloc(expr) }
159 }
160 Adjust::Borrow(AutoBorrow::RawPtr(mutability)) => {
161 ExprKind::AddressOf { mutability, arg: self.arena.alloc(expr) }
162 }
163 };
164
165 Expr { temp_lifetime, ty: adjustment.target, span, kind }
166 }
167
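    /// Mirrors `expr` into a THIR expression without applying its typeck
    /// adjustments; `mirror_expr_inner` layers the adjustments and enclosing
    /// scopes on top afterwards.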
168 fn make_mirror_unadjusted(&mut self, expr: &'tcx hir::Expr<'tcx>) -> Expr<'thir, 'tcx> {
169 let expr_ty = self.typeck_results().expr_ty(expr);
170 let temp_lifetime = self.region_scope_tree.temporary_scope(expr.hir_id.local_id);
171
172 let kind = match expr.kind {
173 // Here comes the interesting stuff:
174 hir::ExprKind::MethodCall(_, method_span, ref args, fn_span) => {
175 // Rewrite a.b(c) into UFCS form like Trait::b(a, c)
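                // For example, a call like `x.clone()` is mirrored here as a plain
                // `ExprKind::Call` of the resolved method (`Clone::clone`), with the
                // already-adjusted receiver as the first argument, roughly
                // `Clone::clone(&x)`.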
176 let expr = self.method_callee(expr, method_span, None);
177 let args = self.mirror_exprs(args);
178 ExprKind::Call {
179 ty: expr.ty,
180 fun: self.arena.alloc(expr),
181 args,
182 from_hir_call: true,
183 fn_span,
184 }
185 }
186
187 hir::ExprKind::Call(ref fun, ref args) => {
188 if self.typeck_results().is_method_call(expr) {
189 // The callee is something implementing Fn, FnMut, or FnOnce.
190 // Find the actual method implementation being called and
191 // build the appropriate UFCS call expression with the
192 // callee-object as expr parameter.
193
194 // rewrite f(u, v) into FnOnce::call_once(f, (u, v))
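                    // For example, given `let f = |a: i32, b: i32| a + b;`, the call
                    // `f(1, 2)` is mirrored roughly as a call to the resolved `Fn*`
                    // trait method with the arguments collected into a tuple,
                    // e.g. `Fn::call(&f, (1, 2))`.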
195
196 let method = self.method_callee(expr, fun.span, None);
197
198 let arg_tys = args.iter().map(|e| self.typeck_results().expr_ty_adjusted(e));
199 let tupled_args = Expr {
200 ty: self.tcx.mk_tup(arg_tys),
201 temp_lifetime,
202 span: expr.span,
203 kind: ExprKind::Tuple { fields: self.mirror_exprs(args) },
204 };
205
206 ExprKind::Call {
207 ty: method.ty,
208 fun: self.arena.alloc(method),
209 args: self
210 .arena
211 .alloc_from_iter(vec![self.mirror_expr_inner(fun), tupled_args]),
212 from_hir_call: true,
213 fn_span: expr.span,
214 }
215 } else {
216 let adt_data =
217 if let hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) = fun.kind {
218 // Tuple-like ADTs are represented as ExprKind::Call. We convert them here.
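                            // For example, given `struct Wrapper(u32);`, the expression
                            // `Wrapper(5)` looks like a call in HIR but is mirrored below
                            // as `ExprKind::Adt` with a single field expression for `5`.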
219 expr_ty.ty_adt_def().and_then(|adt_def| match path.res {
220 Res::Def(DefKind::Ctor(_, CtorKind::Fn), ctor_id) => {
221 Some((adt_def, adt_def.variant_index_with_ctor_id(ctor_id)))
222 }
223 Res::SelfCtor(..) => Some((adt_def, VariantIdx::new(0))),
224 _ => None,
225 })
226 } else {
227 None
228 };
229 if let Some((adt_def, index)) = adt_data {
230 let substs = self.typeck_results().node_substs(fun.hir_id);
231 let user_provided_types = self.typeck_results().user_provided_types();
232 let user_ty =
233 user_provided_types.get(fun.hir_id).copied().map(|mut u_ty| {
234 if let UserType::TypeOf(ref mut did, _) = &mut u_ty.value {
235 *did = adt_def.did;
236 }
237 u_ty
238 });
239 debug!("make_mirror_unadjusted: (call) user_ty={:?}", user_ty);
240
241 let field_refs =
242 self.arena.alloc_from_iter(args.iter().enumerate().map(|(idx, e)| {
243 FieldExpr { name: Field::new(idx), expr: self.mirror_expr(e) }
244 }));
245 ExprKind::Adt {
246 adt_def,
247 substs,
248 variant_index: index,
249 fields: field_refs,
250 user_ty,
251 base: None,
252 }
253 } else {
254 ExprKind::Call {
255 ty: self.typeck_results().node_type(fun.hir_id),
256 fun: self.mirror_expr(fun),
257 args: self.mirror_exprs(args),
258 from_hir_call: true,
259 fn_span: expr.span,
260 }
261 }
262 }
263 }
264
265 hir::ExprKind::AddrOf(hir::BorrowKind::Ref, mutbl, ref arg) => {
266 ExprKind::Borrow { borrow_kind: mutbl.to_borrow_kind(), arg: self.mirror_expr(arg) }
267 }
268
269 hir::ExprKind::AddrOf(hir::BorrowKind::Raw, mutability, ref arg) => {
270 ExprKind::AddressOf { mutability, arg: self.mirror_expr(arg) }
271 }
272
273 hir::ExprKind::Block(ref blk, _) => ExprKind::Block { body: self.mirror_block(blk) },
274
275 hir::ExprKind::Assign(ref lhs, ref rhs, _) => {
276 ExprKind::Assign { lhs: self.mirror_expr(lhs), rhs: self.mirror_expr(rhs) }
277 }
278
279 hir::ExprKind::AssignOp(op, ref lhs, ref rhs) => {
280 if self.typeck_results().is_method_call(expr) {
281 let lhs = self.mirror_expr_inner(lhs);
282 let rhs = self.mirror_expr_inner(rhs);
283 self.overloaded_operator(expr, self.arena.alloc_from_iter(vec![lhs, rhs]))
284 } else {
285 ExprKind::AssignOp {
286 op: bin_op(op.node),
287 lhs: self.mirror_expr(lhs),
288 rhs: self.mirror_expr(rhs),
289 }
290 }
291 }
292
293 hir::ExprKind::Lit(ref lit) => ExprKind::Literal {
294 literal: self.const_eval_literal(&lit.node, expr_ty, lit.span, false),
295 user_ty: None,
296 const_id: None,
297 },
298
299 hir::ExprKind::Binary(op, ref lhs, ref rhs) => {
300 if self.typeck_results().is_method_call(expr) {
301 let lhs = self.mirror_expr_inner(lhs);
302 let rhs = self.mirror_expr_inner(rhs);
303 self.overloaded_operator(expr, self.arena.alloc_from_iter(vec![lhs, rhs]))
304 } else {
305 // FIXME overflow
306 match op.node {
307 hir::BinOpKind::And => ExprKind::LogicalOp {
308 op: LogicalOp::And,
309 lhs: self.mirror_expr(lhs),
310 rhs: self.mirror_expr(rhs),
311 },
312 hir::BinOpKind::Or => ExprKind::LogicalOp {
313 op: LogicalOp::Or,
314 lhs: self.mirror_expr(lhs),
315 rhs: self.mirror_expr(rhs),
316 },
317
318 _ => {
319 let op = bin_op(op.node);
320 ExprKind::Binary {
321 op,
322 lhs: self.mirror_expr(lhs),
323 rhs: self.mirror_expr(rhs),
324 }
325 }
326 }
327 }
328 }
329
330 hir::ExprKind::Index(ref lhs, ref index) => {
331 if self.typeck_results().is_method_call(expr) {
332 let lhs = self.mirror_expr_inner(lhs);
333 let index = self.mirror_expr_inner(index);
334 self.overloaded_place(
335 expr,
336 expr_ty,
337 None,
338 self.arena.alloc_from_iter(vec![lhs, index]),
339 expr.span,
340 )
341 } else {
342 ExprKind::Index { lhs: self.mirror_expr(lhs), index: self.mirror_expr(index) }
343 }
344 }
345
346 hir::ExprKind::Unary(hir::UnOp::Deref, ref arg) => {
347 if self.typeck_results().is_method_call(expr) {
348 let arg = self.mirror_expr_inner(arg);
349 self.overloaded_place(
350 expr,
351 expr_ty,
352 None,
353 self.arena.alloc_from_iter(iter::once(arg)),
354 expr.span,
355 )
356 } else {
357 ExprKind::Deref { arg: self.mirror_expr(arg) }
358 }
359 }
360
361 hir::ExprKind::Unary(hir::UnOp::Not, ref arg) => {
362 if self.typeck_results().is_method_call(expr) {
363 let arg = self.mirror_expr_inner(arg);
364 self.overloaded_operator(expr, self.arena.alloc_from_iter(iter::once(arg)))
365 } else {
366 ExprKind::Unary { op: UnOp::Not, arg: self.mirror_expr(arg) }
367 }
368 }
369
370 hir::ExprKind::Unary(hir::UnOp::Neg, ref arg) => {
371 if self.typeck_results().is_method_call(expr) {
372 let arg = self.mirror_expr_inner(arg);
373 self.overloaded_operator(expr, self.arena.alloc_from_iter(iter::once(arg)))
374 } else if let hir::ExprKind::Lit(ref lit) = arg.kind {
375 ExprKind::Literal {
376 literal: self.const_eval_literal(&lit.node, expr_ty, lit.span, true),
377 user_ty: None,
378 const_id: None,
379 }
380 } else {
381 ExprKind::Unary { op: UnOp::Neg, arg: self.mirror_expr(arg) }
382 }
383 }
384
385 hir::ExprKind::Struct(ref qpath, ref fields, ref base) => match expr_ty.kind() {
386 ty::Adt(adt, substs) => match adt.adt_kind() {
387 AdtKind::Struct | AdtKind::Union => {
388 let user_provided_types = self.typeck_results().user_provided_types();
389 let user_ty = user_provided_types.get(expr.hir_id).copied();
390 debug!("make_mirror_unadjusted: (struct/union) user_ty={:?}", user_ty);
391 ExprKind::Adt {
392 adt_def: adt,
393 variant_index: VariantIdx::new(0),
394 substs,
395 user_ty,
396 fields: self.field_refs(fields),
397 base: base.as_ref().map(|base| FruInfo {
398 base: self.mirror_expr(base),
399 field_types: self.arena.alloc_from_iter(
400 self.typeck_results().fru_field_types()[expr.hir_id]
401 .iter()
402 .cloned(),
403 ),
404 }),
405 }
406 }
407 AdtKind::Enum => {
408 let res = self.typeck_results().qpath_res(qpath, expr.hir_id);
409 match res {
410 Res::Def(DefKind::Variant, variant_id) => {
411 assert!(base.is_none());
412
413 let index = adt.variant_index_with_id(variant_id);
414 let user_provided_types =
415 self.typeck_results().user_provided_types();
416 let user_ty = user_provided_types.get(expr.hir_id).copied();
417 debug!("make_mirror_unadjusted: (variant) user_ty={:?}", user_ty);
418 ExprKind::Adt {
419 adt_def: adt,
420 variant_index: index,
421 substs,
422 user_ty,
423 fields: self.field_refs(fields),
424 base: None,
425 }
426 }
427 _ => {
428 span_bug!(expr.span, "unexpected res: {:?}", res);
429 }
430 }
431 }
432 },
433 _ => {
434 span_bug!(expr.span, "unexpected type for struct literal: {:?}", expr_ty);
435 }
436 },
437
438 hir::ExprKind::Closure(..) => {
439 let closure_ty = self.typeck_results().expr_ty(expr);
440 let (def_id, substs, movability) = match *closure_ty.kind() {
441 ty::Closure(def_id, substs) => (def_id, UpvarSubsts::Closure(substs), None),
442 ty::Generator(def_id, substs, movability) => {
443 (def_id, UpvarSubsts::Generator(substs), Some(movability))
444 }
445 _ => {
446 span_bug!(expr.span, "closure expr w/o closure type: {:?}", closure_ty);
447 }
448 };
449
450 let upvars = self.arena.alloc_from_iter(
451 self.typeck_results
452 .closure_min_captures_flattened(def_id)
453 .zip(substs.upvar_tys())
454 .map(|(captured_place, ty)| self.capture_upvar(expr, captured_place, ty)),
455 );
456
457 // Convert the closure fake reads, if any, from hir `Place` to ExprRef
458 let fake_reads = match self.typeck_results.closure_fake_reads.get(&def_id) {
459 Some(fake_reads) => fake_reads
460 .iter()
461 .map(|(place, cause, hir_id)| {
462 let expr = self.convert_captured_hir_place(expr, place.clone());
463 let expr_ref: &'thir Expr<'thir, 'tcx> = self.arena.alloc(expr);
464 (expr_ref, *cause, *hir_id)
465 })
466 .collect(),
467 None => Vec::new(),
468 };
469
470 ExprKind::Closure { closure_id: def_id, substs, upvars, movability, fake_reads }
471 }
472
473 hir::ExprKind::Path(ref qpath) => {
474 let res = self.typeck_results().qpath_res(qpath, expr.hir_id);
475 self.convert_path_expr(expr, res)
476 }
477
478 hir::ExprKind::InlineAsm(ref asm) => ExprKind::InlineAsm {
479 template: asm.template,
480 operands: self.arena.alloc_from_iter(asm.operands.iter().map(|(op, _op_sp)| {
481 match *op {
482 hir::InlineAsmOperand::In { reg, ref expr } => {
483 InlineAsmOperand::In { reg, expr: self.mirror_expr(expr) }
484 }
485 hir::InlineAsmOperand::Out { reg, late, ref expr } => {
486 InlineAsmOperand::Out {
487 reg,
488 late,
489 expr: expr.as_ref().map(|expr| self.mirror_expr(expr)),
490 }
491 }
492 hir::InlineAsmOperand::InOut { reg, late, ref expr } => {
493 InlineAsmOperand::InOut { reg, late, expr: self.mirror_expr(expr) }
494 }
495 hir::InlineAsmOperand::SplitInOut {
496 reg,
497 late,
498 ref in_expr,
499 ref out_expr,
500 } => InlineAsmOperand::SplitInOut {
501 reg,
502 late,
503 in_expr: self.mirror_expr(in_expr),
504 out_expr: out_expr.as_ref().map(|expr| self.mirror_expr(expr)),
505 },
506 hir::InlineAsmOperand::Const { ref anon_const } => {
507 let anon_const_def_id = self.tcx.hir().local_def_id(anon_const.hir_id);
508 let value = ty::Const::from_anon_const(self.tcx, anon_const_def_id);
509 let span = self.tcx.hir().span(anon_const.hir_id);
510
511 InlineAsmOperand::Const { value, span }
512 }
513 hir::InlineAsmOperand::Sym { ref expr } => {
514 let qpath = match expr.kind {
515 hir::ExprKind::Path(ref qpath) => qpath,
516 _ => span_bug!(
517 expr.span,
518 "asm `sym` operand should be a path, found {:?}",
519 expr.kind
520 ),
521 };
522 let temp_lifetime =
523 self.region_scope_tree.temporary_scope(expr.hir_id.local_id);
524 let res = self.typeck_results().qpath_res(qpath, expr.hir_id);
525 let ty;
526 match res {
527 Res::Def(DefKind::Fn, _) | Res::Def(DefKind::AssocFn, _) => {
528 ty = self.typeck_results().node_type(expr.hir_id);
529 let user_ty = self.user_substs_applied_to_res(expr.hir_id, res);
530 InlineAsmOperand::SymFn {
531 expr: self.arena.alloc(Expr {
532 ty,
533 temp_lifetime,
534 span: expr.span,
535 kind: ExprKind::Literal {
536 literal: ty::Const::zero_sized(self.tcx, ty),
537 user_ty,
538 const_id: None,
539 },
540 }),
541 }
542 }
543
544 Res::Def(DefKind::Static, def_id) => {
545 InlineAsmOperand::SymStatic { def_id }
546 }
547
548 _ => {
549 self.tcx.sess.span_err(
550 expr.span,
551 "asm `sym` operand must point to a fn or static",
552 );
553
554 // Not a real fn, but we're not reaching codegen anyways...
555 ty = self.tcx.ty_error();
556 InlineAsmOperand::SymFn {
557 expr: self.arena.alloc(Expr {
558 ty,
559 temp_lifetime,
560 span: expr.span,
561 kind: ExprKind::Literal {
562 literal: ty::Const::zero_sized(self.tcx, ty),
563 user_ty: None,
564 const_id: None,
565 },
566 }),
567 }
568 }
569 }
570 }
571 }
572 })),
573 options: asm.options,
574 line_spans: asm.line_spans,
575 },
576
577 hir::ExprKind::LlvmInlineAsm(ref asm) => ExprKind::LlvmInlineAsm {
578 asm: &asm.inner,
579 outputs: self.mirror_exprs(asm.outputs_exprs),
580 inputs: self.mirror_exprs(asm.inputs_exprs),
581 },
582
583 hir::ExprKind::ConstBlock(ref anon_const) => {
584 let anon_const_def_id = self.tcx.hir().local_def_id(anon_const.hir_id);
585 let value = ty::Const::from_anon_const(self.tcx, anon_const_def_id);
586
587 ExprKind::ConstBlock { value }
588 }
589 // Now comes the rote stuff:
590 hir::ExprKind::Repeat(ref v, ref count) => {
591 let count_def_id = self.tcx.hir().local_def_id(count.hir_id);
592 let count = ty::Const::from_anon_const(self.tcx, count_def_id);
593
594 ExprKind::Repeat { value: self.mirror_expr(v), count }
595 }
596 hir::ExprKind::Ret(ref v) => {
597 ExprKind::Return { value: v.as_ref().map(|v| self.mirror_expr(v)) }
598 }
599 hir::ExprKind::Break(dest, ref value) => match dest.target_id {
600 Ok(target_id) => ExprKind::Break {
601 label: region::Scope { id: target_id.local_id, data: region::ScopeData::Node },
602 value: value.as_ref().map(|value| self.mirror_expr(value)),
603 },
604 Err(err) => bug!("invalid loop id for break: {}", err),
605 },
606 hir::ExprKind::Continue(dest) => match dest.target_id {
607 Ok(loop_id) => ExprKind::Continue {
608 label: region::Scope { id: loop_id.local_id, data: region::ScopeData::Node },
609 },
610 Err(err) => bug!("invalid loop id for continue: {}", err),
611 },
612 hir::ExprKind::If(cond, then, else_opt) => ExprKind::If {
613 cond: self.mirror_expr(cond),
614 then: self.mirror_expr(then),
615 else_opt: else_opt.map(|el| self.mirror_expr(el)),
616 },
617 hir::ExprKind::Match(ref discr, ref arms, _) => ExprKind::Match {
618 scrutinee: self.mirror_expr(discr),
619 arms: self.arena.alloc_from_iter(arms.iter().map(|a| self.convert_arm(a))),
620 },
621 hir::ExprKind::Loop(ref body, ..) => {
622 let block_ty = self.typeck_results().node_type(body.hir_id);
623 let temp_lifetime = self.region_scope_tree.temporary_scope(body.hir_id.local_id);
624 let block = self.mirror_block(body);
625 let body = self.arena.alloc(Expr {
626 ty: block_ty,
627 temp_lifetime,
628 span: block.span,
629 kind: ExprKind::Block { body: block },
630 });
631 ExprKind::Loop { body }
632 }
633 hir::ExprKind::Field(ref source, ..) => ExprKind::Field {
634 lhs: self.mirror_expr(source),
635 name: Field::new(self.tcx.field_index(expr.hir_id, self.typeck_results)),
636 },
637 hir::ExprKind::Cast(ref source, ref cast_ty) => {
638 // Check for a user-given type annotation on this `cast`
639 let user_provided_types = self.typeck_results.user_provided_types();
640 let user_ty = user_provided_types.get(cast_ty.hir_id);
641
642 debug!(
643 "cast({:?}) has ty w/ hir_id {:?} and user provided ty {:?}",
644 expr, cast_ty.hir_id, user_ty,
645 );
646
647 // Check to see if this cast is a "coercion cast", where the cast is actually done
648 // using a coercion (or is a no-op).
649 let cast = if self.typeck_results().is_coercion_cast(source.hir_id) {
650 // Convert the lexpr to a vexpr.
651 ExprKind::Use { source: self.mirror_expr(source) }
652 } else if self.typeck_results().expr_ty(source).is_region_ptr() {
653 // Special cased so that we can type check that the element
654 // type of the source matches the pointed to type of the
655 // destination.
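                    // For example, `&arr as *const u8` where `arr: [u8; 4]` is mirrored
                    // as an `ArrayToPointer` pointer cast: the source is a reference to
                    // an array and the destination is a thin raw pointer to its elements.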
656 ExprKind::Pointer {
657 source: self.mirror_expr(source),
658 cast: PointerCast::ArrayToPointer,
659 }
660 } else {
661                 // Check whether this is casting an enum variant discriminant.
662                 // To prevent cycles, we refer to the discriminant initializer,
663                 // which is always an integer and thus doesn't need to know the
664                 // enum's layout (or its tag type) to compute it during const eval.
665 // Example:
666 // enum Foo {
667 // A,
668 // B = A as isize + 4,
669 // }
670 // The correct solution would be to add symbolic computations to miri,
671 // so we wouldn't have to compute and store the actual value
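                    // Concretely, a cast like `Foo::B as isize` is mirrored as the
                    // nearest preceding explicit discriminant initializer (if any) plus
                    // a constant offset, so const eval never needs `Foo`'s layout.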
672 let var = if let hir::ExprKind::Path(ref qpath) = source.kind {
673 let res = self.typeck_results().qpath_res(qpath, source.hir_id);
674 self.typeck_results().node_type(source.hir_id).ty_adt_def().and_then(
675 |adt_def| match res {
676 Res::Def(
677 DefKind::Ctor(CtorOf::Variant, CtorKind::Const),
678 variant_ctor_id,
679 ) => {
680 let idx = adt_def.variant_index_with_ctor_id(variant_ctor_id);
681 let (d, o) = adt_def.discriminant_def_for_variant(idx);
682 use rustc_middle::ty::util::IntTypeExt;
683 let ty = adt_def.repr.discr_type();
684 let ty = ty.to_ty(self.tcx());
685 Some((d, o, ty))
686 }
687 _ => None,
688 },
689 )
690 } else {
691 None
692 };
693
694 let source = if let Some((did, offset, var_ty)) = var {
695 let mk_const = |literal| {
696 self.arena.alloc(Expr {
697 temp_lifetime,
698 ty: var_ty,
699 span: expr.span,
700 kind: ExprKind::Literal { literal, user_ty: None, const_id: None },
701 })
702 };
703 let offset = mk_const(ty::Const::from_bits(
704 self.tcx,
705 offset as u128,
706 self.param_env.and(var_ty),
707 ));
708 match did {
709 Some(did) => {
710 // in case we are offsetting from a computed discriminant
711 // and not the beginning of discriminants (which is always `0`)
712 let substs = InternalSubsts::identity_for_item(self.tcx(), did);
713 let lhs = mk_const(self.tcx().mk_const(ty::Const {
714 val: ty::ConstKind::Unevaluated(ty::Unevaluated {
715 def: ty::WithOptConstParam::unknown(did),
716 substs,
717 promoted: None,
718 }),
719 ty: var_ty,
720 }));
721 let bin =
722                                 ExprKind::Binary { op: BinOp::Add, lhs, rhs: offset };
723 self.arena.alloc(Expr {
724 temp_lifetime,
725 ty: var_ty,
726 span: expr.span,
727 kind: bin,
728 })
729 }
730 None => offset,
731 }
732 } else {
733 self.mirror_expr(source)
734 };
735
736                 ExprKind::Cast { source }
737 };
738
739 if let Some(user_ty) = user_ty {
740 // NOTE: Creating a new Expr and wrapping a Cast inside of it may be
741 // inefficient, revisit this when performance becomes an issue.
742 let cast_expr = self.arena.alloc(Expr {
743 temp_lifetime,
744 ty: expr_ty,
745 span: expr.span,
746 kind: cast,
747 });
748 debug!("make_mirror_unadjusted: (cast) user_ty={:?}", user_ty);
749
750 ExprKind::ValueTypeAscription { source: cast_expr, user_ty: Some(*user_ty) }
751 } else {
752 cast
753 }
754 }
755 hir::ExprKind::Type(ref source, ref ty) => {
756 let user_provided_types = self.typeck_results.user_provided_types();
757 let user_ty = user_provided_types.get(ty.hir_id).copied();
758 debug!("make_mirror_unadjusted: (type) user_ty={:?}", user_ty);
759 let mirrored = self.mirror_expr(source);
760 if source.is_syntactic_place_expr() {
761 ExprKind::PlaceTypeAscription { source: mirrored, user_ty }
762 } else {
763 ExprKind::ValueTypeAscription { source: mirrored, user_ty }
764 }
765 }
766 hir::ExprKind::DropTemps(ref source) => {
767 ExprKind::Use { source: self.mirror_expr(source) }
768 }
769 hir::ExprKind::Box(ref value) => ExprKind::Box { value: self.mirror_expr(value) },
770 hir::ExprKind::Array(ref fields) => {
771 ExprKind::Array { fields: self.mirror_exprs(fields) }
772 }
773 hir::ExprKind::Tup(ref fields) => ExprKind::Tuple { fields: self.mirror_exprs(fields) },
774
775 hir::ExprKind::Yield(ref v, _) => ExprKind::Yield { value: self.mirror_expr(v) },
776 hir::ExprKind::Err => unreachable!(),
777 };
778
779 Expr { temp_lifetime, ty: expr_ty, span: expr.span, kind }
780 }
781
782 fn user_substs_applied_to_res(
783 &mut self,
784 hir_id: hir::HirId,
785 res: Res,
786 ) -> Option<ty::CanonicalUserType<'tcx>> {
787 debug!("user_substs_applied_to_res: res={:?}", res);
788 let user_provided_type = match res {
789 // A reference to something callable -- e.g., a fn, method, or
790 // a tuple-struct or tuple-variant. This has the type of a
791 // `Fn` but with the user-given substitutions.
792 Res::Def(DefKind::Fn, _)
793 | Res::Def(DefKind::AssocFn, _)
794 | Res::Def(DefKind::Ctor(_, CtorKind::Fn), _)
795 | Res::Def(DefKind::Const, _)
796 | Res::Def(DefKind::AssocConst, _) => {
797 self.typeck_results().user_provided_types().get(hir_id).copied()
798 }
799
800 // A unit struct/variant which is used as a value (e.g.,
801 // `None`). This has the type of the enum/struct that defines
802 // this variant -- but with the substitutions given by the
803 // user.
804 Res::Def(DefKind::Ctor(_, CtorKind::Const), _) => {
805 self.user_substs_applied_to_ty_of_hir_id(hir_id)
806 }
807
808             // `Self` is used in an expression as a tuple struct constructor or a unit struct constructor
809 Res::SelfCtor(_) => self.user_substs_applied_to_ty_of_hir_id(hir_id),
810
811 _ => bug!("user_substs_applied_to_res: unexpected res {:?} at {:?}", res, hir_id),
812 };
813 debug!("user_substs_applied_to_res: user_provided_type={:?}", user_provided_type);
814 user_provided_type
815 }
816
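    /// Builds the zero-sized function item literal for the callee of a method call
    /// or overloaded operator: either the explicitly supplied `overloaded_callee`,
    /// or the type-dependent def that typeck recorded for `expr`.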
817 fn method_callee(
818 &mut self,
819 expr: &hir::Expr<'_>,
820 span: Span,
821 overloaded_callee: Option<(DefId, SubstsRef<'tcx>)>,
822 ) -> Expr<'thir, 'tcx> {
823 let temp_lifetime = self.region_scope_tree.temporary_scope(expr.hir_id.local_id);
824 let (def_id, substs, user_ty) = match overloaded_callee {
825 Some((def_id, substs)) => (def_id, substs, None),
826 None => {
827 let (kind, def_id) =
828 self.typeck_results().type_dependent_def(expr.hir_id).unwrap_or_else(|| {
829 span_bug!(expr.span, "no type-dependent def for method callee")
830 });
831 let user_ty = self.user_substs_applied_to_res(expr.hir_id, Res::Def(kind, def_id));
832 debug!("method_callee: user_ty={:?}", user_ty);
833 (def_id, self.typeck_results().node_substs(expr.hir_id), user_ty)
834 }
835 };
836 let ty = self.tcx().mk_fn_def(def_id, substs);
837 Expr {
838 temp_lifetime,
839 ty,
840 span,
841 kind: ExprKind::Literal {
842 literal: ty::Const::zero_sized(self.tcx(), ty),
843 user_ty,
844 const_id: None,
845 },
846 }
847 }
848
849 fn convert_arm(&mut self, arm: &'tcx hir::Arm<'tcx>) -> Arm<'thir, 'tcx> {
850 Arm {
851 pattern: self.pattern_from_hir(&arm.pat),
852 guard: arm.guard.as_ref().map(|g| match g {
853 hir::Guard::If(ref e) => Guard::If(self.mirror_expr(e)),
854 hir::Guard::IfLet(ref pat, ref e) => {
855 Guard::IfLet(self.pattern_from_hir(pat), self.mirror_expr(e))
856 }
857 }),
858 body: self.mirror_expr(arm.body),
859 lint_level: LintLevel::Explicit(arm.hir_id),
860 scope: region::Scope { id: arm.hir_id.local_id, data: region::ScopeData::Node },
861 span: arm.span,
862 }
863 }
864
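    /// Mirrors a path expression according to what its `Res` refers to: functions and
    /// tuple constructors become zero-sized fn literals, constants become (unevaluated)
    /// literals, unit struct/variant constructors become `ExprKind::Adt`, statics are
    /// read through `*&STATIC`, and locals become variable references.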
865 fn convert_path_expr(
866 &mut self,
867 expr: &'tcx hir::Expr<'tcx>,
868 res: Res,
869 ) -> ExprKind<'thir, 'tcx> {
870 let substs = self.typeck_results().node_substs(expr.hir_id);
871 match res {
872 // A regular function, constructor function or a constant.
873 Res::Def(DefKind::Fn, _)
874 | Res::Def(DefKind::AssocFn, _)
875 | Res::Def(DefKind::Ctor(_, CtorKind::Fn), _)
876 | Res::SelfCtor(..) => {
877 let user_ty = self.user_substs_applied_to_res(expr.hir_id, res);
878 debug!("convert_path_expr: user_ty={:?}", user_ty);
879 ExprKind::Literal {
880 literal: ty::Const::zero_sized(
881 self.tcx,
882 self.typeck_results().node_type(expr.hir_id),
883 ),
884 user_ty,
885 const_id: None,
886 }
887 }
888
889 Res::Def(DefKind::ConstParam, def_id) => {
890 let hir_id = self.tcx.hir().local_def_id_to_hir_id(def_id.expect_local());
891 let item_id = self.tcx.hir().get_parent_node(hir_id);
892 let item_def_id = self.tcx.hir().local_def_id(item_id);
893 let generics = self.tcx.generics_of(item_def_id);
894 let index = generics.param_def_id_to_index[&def_id];
895 let name = self.tcx.hir().name(hir_id);
896 let val = ty::ConstKind::Param(ty::ParamConst::new(index, name));
897 ExprKind::Literal {
898 literal: self.tcx.mk_const(ty::Const {
899 val,
900 ty: self.typeck_results().node_type(expr.hir_id),
901 }),
902 user_ty: None,
903 const_id: Some(def_id),
904 }
905 }
906
907 Res::Def(DefKind::Const, def_id) | Res::Def(DefKind::AssocConst, def_id) => {
908 let user_ty = self.user_substs_applied_to_res(expr.hir_id, res);
909 debug!("convert_path_expr: (const) user_ty={:?}", user_ty);
910 ExprKind::Literal {
911 literal: self.tcx.mk_const(ty::Const {
912 val: ty::ConstKind::Unevaluated(ty::Unevaluated {
913 def: ty::WithOptConstParam::unknown(def_id),
914 substs,
915 promoted: None,
916 }),
917 ty: self.typeck_results().node_type(expr.hir_id),
918 }),
919 user_ty,
920 const_id: Some(def_id),
921 }
922 }
923
924 Res::Def(DefKind::Ctor(_, CtorKind::Const), def_id) => {
925 let user_provided_types = self.typeck_results.user_provided_types();
926 let user_provided_type = user_provided_types.get(expr.hir_id).copied();
927 debug!("convert_path_expr: user_provided_type={:?}", user_provided_type);
928 let ty = self.typeck_results().node_type(expr.hir_id);
929 match ty.kind() {
930 // A unit struct/variant which is used as a value.
931 // We return a completely different ExprKind here to account for this special case.
932 ty::Adt(adt_def, substs) => ExprKind::Adt {
933 adt_def,
934 variant_index: adt_def.variant_index_with_ctor_id(def_id),
935 substs,
936 user_ty: user_provided_type,
937 fields: self.arena.alloc_from_iter(iter::empty()),
938 base: None,
939 },
940 _ => bug!("unexpected ty: {:?}", ty),
941 }
942 }
943
944 // We encode uses of statics as a `*&STATIC` where the `&STATIC` part is
945 // a constant reference (or constant raw pointer for `static mut`) in MIR
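            // For example, for `static FOO: u32 = 1;`, a mention of `FOO` is mirrored
            // roughly as `*&FOO`: an `ExprKind::Deref` wrapping an `ExprKind::StaticRef`
            // (or `ExprKind::ThreadLocalRef` for thread-local statics).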
946 Res::Def(DefKind::Static, id) => {
947 let ty = self.tcx.static_ptr_ty(id);
948 let temp_lifetime = self.region_scope_tree.temporary_scope(expr.hir_id.local_id);
949 let kind = if self.tcx.is_thread_local_static(id) {
950 ExprKind::ThreadLocalRef(id)
951 } else {
952 let ptr = self.tcx.create_static_alloc(id);
953 ExprKind::StaticRef {
954 literal: ty::Const::from_scalar(self.tcx, Scalar::Ptr(ptr.into()), ty),
955 def_id: id,
956 }
957 };
958 ExprKind::Deref {
959 arg: self.arena.alloc(Expr { ty, temp_lifetime, span: expr.span, kind }),
960 }
961 }
962
963 Res::Local(var_hir_id) => self.convert_var(var_hir_id),
964
965 _ => span_bug!(expr.span, "res `{:?}` not yet implemented", res),
966 }
967 }
968
969 fn convert_var(&mut self, var_hir_id: hir::HirId) -> ExprKind<'thir, 'tcx> {
970 // We want upvars here not captures.
971 // Captures will be handled in MIR.
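        // For example, inside `let c = |n: i32| count + n;`, the mention of `count`
        // is mirrored as `ExprKind::UpvarRef`, while `n` (local to the closure)
        // becomes `ExprKind::VarRef`.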
972 let is_upvar = self
973 .tcx
974 .upvars_mentioned(self.body_owner)
975 .map_or(false, |upvars| upvars.contains_key(&var_hir_id));
976
977 debug!(
978 "convert_var({:?}): is_upvar={}, body_owner={:?}",
979 var_hir_id, is_upvar, self.body_owner
980 );
981
982 if is_upvar {
983 ExprKind::UpvarRef { closure_def_id: self.body_owner, var_hir_id }
984 } else {
985 ExprKind::VarRef { id: var_hir_id }
986 }
987 }
988
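    /// Builds the `ExprKind::Call` for an overloaded operator such as `a + b`
    /// resolving to `Add::add`; `from_hir_call: false` records that the call was
    /// written as operator sugar rather than as an explicit call expression.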
989 fn overloaded_operator(
990 &mut self,
991 expr: &'tcx hir::Expr<'tcx>,
992 args: &'thir [Expr<'thir, 'tcx>],
993 ) -> ExprKind<'thir, 'tcx> {
994 let fun = self.arena.alloc(self.method_callee(expr, expr.span, None));
995 ExprKind::Call { ty: fun.ty, fun, args, from_hir_call: false, fn_span: expr.span }
996 }
997
998 fn overloaded_place(
999 &mut self,
1000 expr: &'tcx hir::Expr<'tcx>,
1001 place_ty: Ty<'tcx>,
1002 overloaded_callee: Option<(DefId, SubstsRef<'tcx>)>,
1003 args: &'thir [Expr<'thir, 'tcx>],
1004 span: Span,
1005 ) -> ExprKind<'thir, 'tcx> {
1006 // For an overloaded *x or x[y] expression of type T, the method
1007 // call returns an &T and we must add the deref so that the types
1008 // line up (this is because `*x` and `x[y]` represent places):
1009
1010 // Reconstruct the output assuming it's a reference with the
1011 // same region and mutability as the receiver. This holds for
1012 // `Deref(Mut)::Deref(_mut)` and `Index(Mut)::index(_mut)`.
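        // For example, for `v[i]` where `v: Vec<T>`, the overloaded call is roughly
        // `Index::index(&v, i)`, which yields an `&T`; the mirrored expression is then
        // `*Index::index(&v, i)` so the overall expression is a place of type `T`.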
1013 let (region, mutbl) = match *args[0].ty.kind() {
1014 ty::Ref(region, _, mutbl) => (region, mutbl),
1015 _ => span_bug!(span, "overloaded_place: receiver is not a reference"),
1016 };
1017 let ref_ty = self.tcx.mk_ref(region, ty::TypeAndMut { ty: place_ty, mutbl });
1018
1019 // construct the complete expression `foo()` for the overloaded call,
1020 // which will yield the &T type
1021 let temp_lifetime = self.region_scope_tree.temporary_scope(expr.hir_id.local_id);
1022 let fun = self.arena.alloc(self.method_callee(expr, span, overloaded_callee));
1023 let ref_expr = self.arena.alloc(Expr {
1024 temp_lifetime,
1025 ty: ref_ty,
1026 span,
1027 kind: ExprKind::Call { ty: fun.ty, fun, args, from_hir_call: false, fn_span: span },
1028 });
1029
1030 // construct and return a deref wrapper `*foo()`
1031 ExprKind::Deref { arg: ref_expr }
1032 }
1033
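    /// Converts a `hir::Place` captured by a closure into an expression rooted at the
    /// corresponding upvar, re-applying the recorded deref/field projections.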
1034 fn convert_captured_hir_place(
1035 &mut self,
1036 closure_expr: &'tcx hir::Expr<'tcx>,
1037 place: HirPlace<'tcx>,
1038 ) -> Expr<'thir, 'tcx> {
1039 let temp_lifetime = self.region_scope_tree.temporary_scope(closure_expr.hir_id.local_id);
1040 let var_ty = place.base_ty;
1041
1042         // The result of capture analysis in `rustc_typeck/check/upvar.rs` represents a captured path
1043         // as it is seen for use within the closure and not at the time of closure creation.
1044         //
1045         // That is, we expect it to start from a captured upvar and not from something that is
1046         // local to the closure's parent.
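        // For example, for `let p = Point { x: 0, y: 0 }; let c = || p.x;`, the
        // captured place starts from the upvar `p` and carries a `.x` field projection,
        // which the loop below turns into nested `ExprKind::Field` / `ExprKind::Deref`.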
1047 let var_hir_id = match place.base {
1048 HirPlaceBase::Upvar(upvar_id) => upvar_id.var_path.hir_id,
1049 base => bug!("Expected an upvar, found {:?}", base),
1050 };
1051
1052 let mut captured_place_expr = Expr {
1053 temp_lifetime,
1054 ty: var_ty,
1055 span: closure_expr.span,
1056 kind: self.convert_var(var_hir_id),
1057 };
1058
1059 for proj in place.projections.iter() {
1060 let kind = match proj.kind {
1061 HirProjectionKind::Deref => {
1062 ExprKind::Deref { arg: self.arena.alloc(captured_place_expr) }
1063 }
1064 HirProjectionKind::Field(field, ..) => {
1065 // Variant index will always be 0, because for multi-variant
1066 // enums, we capture the enum entirely.
1067 ExprKind::Field {
1068 lhs: self.arena.alloc(captured_place_expr),
1069 name: Field::new(field as usize),
1070 }
1071 }
1072 HirProjectionKind::Index | HirProjectionKind::Subslice => {
1073 // We don't capture these projections, so we can ignore them here
1074 continue;
1075 }
1076 };
1077
1078 captured_place_expr =
1079 Expr { temp_lifetime, ty: proj.ty, span: closure_expr.span, kind };
1080 }
1081
1082 captured_place_expr
1083 }
1084
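    /// Builds the expression that captures `captured_place` for the closure: the
    /// captured place itself for by-value captures, or a borrow of it (with the
    /// borrow kind recorded by capture analysis) for by-ref captures.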
1085 fn capture_upvar(
1086 &mut self,
1087 closure_expr: &'tcx hir::Expr<'tcx>,
1088 captured_place: &'tcx ty::CapturedPlace<'tcx>,
1089 upvar_ty: Ty<'tcx>,
1090 ) -> Expr<'thir, 'tcx> {
1091 let upvar_capture = captured_place.info.capture_kind;
1092 let captured_place_expr =
1093 self.convert_captured_hir_place(closure_expr, captured_place.place.clone());
1094 let temp_lifetime = self.region_scope_tree.temporary_scope(closure_expr.hir_id.local_id);
1095
1096 match upvar_capture {
1097 ty::UpvarCapture::ByValue(_) => captured_place_expr,
1098 ty::UpvarCapture::ByRef(upvar_borrow) => {
1099 let borrow_kind = match upvar_borrow.kind {
1100 ty::BorrowKind::ImmBorrow => BorrowKind::Shared,
1101 ty::BorrowKind::UniqueImmBorrow => BorrowKind::Unique,
1102 ty::BorrowKind::MutBorrow => BorrowKind::Mut { allow_two_phase_borrow: false },
1103 };
1104 Expr {
1105 temp_lifetime,
1106 ty: upvar_ty,
1107 span: closure_expr.span,
1108 kind: ExprKind::Borrow {
1109 borrow_kind,
1110 arg: self.arena.alloc(captured_place_expr),
1111 },
1112 }
1113 }
1114 }
1115 }
1116
1117 /// Converts a list of named fields (i.e., for struct-like struct/enum ADTs) into FieldExpr.
1118 fn field_refs(
1119 &mut self,
1120 fields: &'tcx [hir::ExprField<'tcx>],
1121 ) -> &'thir [FieldExpr<'thir, 'tcx>] {
1122 self.arena.alloc_from_iter(fields.iter().map(|field| FieldExpr {
1123 name: Field::new(self.tcx.field_index(field.hir_id, self.typeck_results)),
1124 expr: self.mirror_expr(field.expr),
1125 }))
1126 }
1127 }
1128
1129 trait ToBorrowKind {
1130 fn to_borrow_kind(&self) -> BorrowKind;
1131 }
1132
1133 impl ToBorrowKind for AutoBorrowMutability {
1134 fn to_borrow_kind(&self) -> BorrowKind {
1135 use rustc_middle::ty::adjustment::AllowTwoPhase;
1136 match *self {
1137 AutoBorrowMutability::Mut { allow_two_phase_borrow } => BorrowKind::Mut {
1138 allow_two_phase_borrow: match allow_two_phase_borrow {
1139 AllowTwoPhase::Yes => true,
1140 AllowTwoPhase::No => false,
1141 },
1142 },
1143 AutoBorrowMutability::Not => BorrowKind::Shared,
1144 }
1145 }
1146 }
1147
1148 impl ToBorrowKind for hir::Mutability {
1149 fn to_borrow_kind(&self) -> BorrowKind {
1150 match *self {
1151 hir::Mutability::Mut => BorrowKind::Mut { allow_two_phase_borrow: false },
1152 hir::Mutability::Not => BorrowKind::Shared,
1153 }
1154 }
1155 }
1156
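/// Maps a HIR binary operator to the corresponding THIR/MIR `BinOp`. The
/// short-circuiting `&&` and `||` are handled separately as `LogicalOp` and
/// therefore hit the `bug!` arm here.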
1157 fn bin_op(op: hir::BinOpKind) -> BinOp {
1158 match op {
1159 hir::BinOpKind::Add => BinOp::Add,
1160 hir::BinOpKind::Sub => BinOp::Sub,
1161 hir::BinOpKind::Mul => BinOp::Mul,
1162 hir::BinOpKind::Div => BinOp::Div,
1163 hir::BinOpKind::Rem => BinOp::Rem,
1164 hir::BinOpKind::BitXor => BinOp::BitXor,
1165 hir::BinOpKind::BitAnd => BinOp::BitAnd,
1166 hir::BinOpKind::BitOr => BinOp::BitOr,
1167 hir::BinOpKind::Shl => BinOp::Shl,
1168 hir::BinOpKind::Shr => BinOp::Shr,
1169 hir::BinOpKind::Eq => BinOp::Eq,
1170 hir::BinOpKind::Lt => BinOp::Lt,
1171 hir::BinOpKind::Le => BinOp::Le,
1172 hir::BinOpKind::Ne => BinOp::Ne,
1173 hir::BinOpKind::Ge => BinOp::Ge,
1174 hir::BinOpKind::Gt => BinOp::Gt,
1175 _ => bug!("no equivalent for ast binop {:?}", op),
1176 }
1177 }