]> git.proxmox.com Git - rustc.git/blob - compiler/rustc_mir_build/src/build/expr/as_rvalue.rs
New upstream version 1.75.0+dfsg1
[rustc.git] / compiler / rustc_mir_build / src / build / expr / as_rvalue.rs
1 //! See docs in `build/expr/mod.rs`.
2
3 use rustc_index::{Idx, IndexVec};
4 use rustc_middle::ty::util::IntTypeExt;
5 use rustc_target::abi::{Abi, FieldIdx, Primitive};
6
7 use crate::build::expr::as_place::PlaceBase;
8 use crate::build::expr::category::{Category, RvalueFunc};
9 use crate::build::{BlockAnd, BlockAndExtension, Builder, NeedsTemporary};
10 use rustc_hir::lang_items::LangItem;
11 use rustc_middle::middle::region;
12 use rustc_middle::mir::interpret::Scalar;
13 use rustc_middle::mir::AssertKind;
14 use rustc_middle::mir::Place;
15 use rustc_middle::mir::*;
16 use rustc_middle::thir::*;
17 use rustc_middle::ty::cast::{mir_cast_kind, CastTy};
18 use rustc_middle::ty::layout::IntegerExt;
19 use rustc_middle::ty::{self, Ty, UpvarArgs};
20 use rustc_span::Span;
21
22 impl<'a, 'tcx> Builder<'a, 'tcx> {
23 /// Returns an rvalue suitable for use until the end of the current
24 /// scope expression.
25 ///
26 /// The operand returned from this function will *not be valid* after
27 /// an ExprKind::Scope is passed, so please do *not* return it from
28 /// functions to avoid bad miscompiles.
29 pub(crate) fn as_local_rvalue(
30 &mut self,
31 block: BasicBlock,
32 expr: &Expr<'tcx>,
33 ) -> BlockAnd<Rvalue<'tcx>> {
34 let local_scope = self.local_scope();
35 self.as_rvalue(block, Some(local_scope), expr)
36 }
37
    /// Compile `expr`, yielding an rvalue.
    ///
    /// Any statements needed to evaluate `expr` are appended starting at
    /// `block`; some arms (e.g. the `exchange_malloc` call for `Box`) also
    /// start new basic blocks, and the returned `BlockAnd` carries the block
    /// in which execution resumes. `scope`, when `Some`, is the temporary
    /// scope into which drops for temporaries created here are scheduled.
    pub(crate) fn as_rvalue(
        &mut self,
        mut block: BasicBlock,
        scope: Option<region::Scope>,
        expr: &Expr<'tcx>,
    ) -> BlockAnd<Rvalue<'tcx>> {
        debug!("expr_as_rvalue(block={:?}, scope={:?}, expr={:?})", block, scope, expr);

        let this = self;
        let expr_span = expr.span;
        let source_info = this.source_info(expr_span);

        match expr.kind {
            // A thread-local reference has a dedicated rvalue variant.
            ExprKind::ThreadLocalRef(did) => block.and(Rvalue::ThreadLocalRef(did)),
            ExprKind::Scope { region_scope, lint_level, value } => {
                // Enter the THIR scope (for drop and lint-level tracking) and
                // lower the inner expression within it.
                let region_scope = (region_scope, source_info);
                this.in_scope(region_scope, lint_level, |this| {
                    this.as_rvalue(block, scope, &this.thir[value])
                })
            }
            ExprKind::Repeat { value, count } => {
                // `[value; 0]` produces an empty array, but `value` may still
                // need to be evaluated (and dropped); see `build_zero_repeat`.
                if Some(0) == count.try_eval_target_usize(this.tcx, this.param_env) {
                    this.build_zero_repeat(block, value, scope, source_info)
                } else {
                    let value_operand = unpack!(
                        block = this.as_operand(
                            block,
                            scope,
                            &this.thir[value],
                            LocalInfo::Boring,
                            NeedsTemporary::No
                        )
                    );
                    block.and(Rvalue::Repeat(value_operand, count))
                }
            }
            ExprKind::Binary { op, lhs, rhs } => {
                // Evaluate left-to-right; `build_binary_op` adds any runtime
                // checks (overflow, shift bounds, division by zero).
                let lhs = unpack!(
                    block = this.as_operand(
                        block,
                        scope,
                        &this.thir[lhs],
                        LocalInfo::Boring,
                        NeedsTemporary::Maybe
                    )
                );
                let rhs = unpack!(
                    block = this.as_operand(
                        block,
                        scope,
                        &this.thir[rhs],
                        LocalInfo::Boring,
                        NeedsTemporary::No
                    )
                );
                this.build_binary_op(block, op, expr_span, expr.ty, lhs, rhs)
            }
            ExprKind::Unary { op, arg } => {
                let arg = unpack!(
                    block = this.as_operand(
                        block,
                        scope,
                        &this.thir[arg],
                        LocalInfo::Boring,
                        NeedsTemporary::No
                    )
                );
                // Check for -MIN on signed integers
                if this.check_overflow && op == UnOp::Neg && expr.ty.is_signed() {
                    let bool_ty = this.tcx.types.bool;

                    let minval = this.minval_literal(expr_span, expr.ty);
                    let is_min = this.temp(bool_ty, expr_span);

                    // `is_min = (arg == MIN)`; negating MIN overflows, so
                    // assert that this is false before emitting the negation.
                    this.cfg.push_assign(
                        block,
                        source_info,
                        is_min,
                        Rvalue::BinaryOp(BinOp::Eq, Box::new((arg.to_copy(), minval))),
                    );

                    block = this.assert(
                        block,
                        Operand::Move(is_min),
                        false,
                        AssertKind::OverflowNeg(arg.to_copy()),
                        expr_span,
                    );
                }
                block.and(Rvalue::UnaryOp(op, arg))
            }
            ExprKind::Box { value } => {
                let value = &this.thir[value];
                let tcx = this.tcx;

                // `exchange_malloc` is unsafe but box is safe, so need a new scope.
                let synth_scope = this.new_source_scope(
                    expr_span,
                    LintLevel::Inherited,
                    Some(Safety::BuiltinUnsafe),
                );
                let synth_info = SourceInfo { span: expr_span, scope: synth_scope };

                // Compute the size and alignment of the boxed value's type so
                // they can be passed to the allocator.
                let size = this.temp(tcx.types.usize, expr_span);
                this.cfg.push_assign(
                    block,
                    synth_info,
                    size,
                    Rvalue::NullaryOp(NullOp::SizeOf, value.ty),
                );

                let align = this.temp(tcx.types.usize, expr_span);
                this.cfg.push_assign(
                    block,
                    synth_info,
                    align,
                    Rvalue::NullaryOp(NullOp::AlignOf, value.ty),
                );

                // malloc some memory of suitable size and align:
                let exchange_malloc = Operand::function_handle(
                    tcx,
                    tcx.require_lang_item(LangItem::ExchangeMalloc, Some(expr_span)),
                    [],
                    expr_span,
                );
                let storage = this.temp(Ty::new_mut_ptr(tcx, tcx.types.u8), expr_span);
                let success = this.cfg.start_new_block();
                // Terminate the current block with the allocation call and
                // continue building in `success`.
                this.cfg.terminate(
                    block,
                    synth_info,
                    TerminatorKind::Call {
                        func: exchange_malloc,
                        args: vec![Operand::Move(size), Operand::Move(align)],
                        destination: storage,
                        target: Some(success),
                        unwind: UnwindAction::Continue,
                        call_source: CallSource::Misc,
                        fn_span: expr_span,
                    },
                );
                this.diverge_from(block);
                block = success;

                // The `Box<T>` temporary created here is not a part of the HIR,
                // and therefore is not considered during coroutine auto-trait
                // determination. See the comment about `box` at `yield_in_scope`.
                let result = this.local_decls.push(LocalDecl::new(expr.ty, expr_span));
                this.cfg.push(
                    block,
                    Statement { source_info, kind: StatementKind::StorageLive(result) },
                );
                if let Some(scope) = scope {
                    // schedule a shallow free of that memory, lest we unwind:
                    this.schedule_drop_storage_and_value(expr_span, scope, result);
                }

                // Transmute `*mut u8` to the box (thus far, uninitialized):
                let box_ = Rvalue::ShallowInitBox(Operand::Move(storage), value.ty);
                this.cfg.push_assign(block, source_info, Place::from(result), box_);

                // initialize the box contents:
                unpack!(
                    block = this.expr_into_dest(
                        this.tcx.mk_place_deref(Place::from(result)),
                        block,
                        value
                    )
                );
                block.and(Rvalue::Use(Operand::Move(Place::from(result))))
            }
            ExprKind::Cast { source } => {
                let source = &this.thir[source];

                // Casting an enum to an integer is equivalent to computing the discriminant and casting the
                // discriminant. Previously every backend had to repeat the logic for this operation. Now we
                // create all the steps directly in MIR with operations all backends need to support anyway.
                let (source, ty) = if let ty::Adt(adt_def, ..) = source.ty.kind()
                    && adt_def.is_enum()
                {
                    let discr_ty = adt_def.repr().discr_type().to_ty(this.tcx);
                    let temp = unpack!(block = this.as_temp(block, scope, source, Mutability::Not));
                    let layout = this.tcx.layout_of(this.param_env.and(source.ty));
                    let discr = this.temp(discr_ty, source.span);
                    this.cfg.push_assign(
                        block,
                        source_info,
                        discr,
                        Rvalue::Discriminant(temp.into()),
                    );
                    let (op, ty) = (Operand::Move(discr), discr_ty);

                    // When the discriminant's scalar layout has a restricted
                    // valid range, emit an `assume` of that range so later
                    // optimizations can rely on it.
                    if let Abi::Scalar(scalar) = layout.unwrap().abi
                        && !scalar.is_always_valid(&this.tcx)
                        && let Primitive::Int(int_width, _signed) = scalar.primitive()
                    {
                        // Compare in the unsigned domain so a possibly
                        // wrapping (start > end) range still works below.
                        let unsigned_ty = int_width.to_ty(this.tcx, false);
                        let unsigned_place = this.temp(unsigned_ty, expr_span);
                        this.cfg.push_assign(
                            block,
                            source_info,
                            unsigned_place,
                            Rvalue::Cast(CastKind::IntToInt, Operand::Copy(discr), unsigned_ty),
                        );

                        let bool_ty = this.tcx.types.bool;
                        let range = scalar.valid_range(&this.tcx);
                        // Contiguous range: value must satisfy both bounds
                        // (`BitAnd`); wrapping range: either bound (`BitOr`).
                        let merge_op =
                            if range.start <= range.end { BinOp::BitAnd } else { BinOp::BitOr };

                        // Emits `tmp = unsigned_place <bin_op> range` and
                        // returns the bool temporary holding the comparison.
                        let mut comparer = |range: u128, bin_op: BinOp| -> Place<'tcx> {
                            let range_val = Const::from_bits(
                                this.tcx,
                                range,
                                ty::ParamEnv::empty().and(unsigned_ty),
                            );
                            let lit_op = this.literal_operand(expr.span, range_val);
                            let is_bin_op = this.temp(bool_ty, expr_span);
                            this.cfg.push_assign(
                                block,
                                source_info,
                                is_bin_op,
                                Rvalue::BinaryOp(
                                    bin_op,
                                    Box::new((Operand::Copy(unsigned_place), lit_op)),
                                ),
                            );
                            is_bin_op
                        };
                        // If the range starts at 0, a single upper-bound check
                        // suffices; otherwise combine both bound checks.
                        let assert_place = if range.start == 0 {
                            comparer(range.end, BinOp::Le)
                        } else {
                            let start_place = comparer(range.start, BinOp::Ge);
                            let end_place = comparer(range.end, BinOp::Le);
                            let merge_place = this.temp(bool_ty, expr_span);
                            this.cfg.push_assign(
                                block,
                                source_info,
                                merge_place,
                                Rvalue::BinaryOp(
                                    merge_op,
                                    Box::new((
                                        Operand::Move(start_place),
                                        Operand::Move(end_place),
                                    )),
                                ),
                            );
                            merge_place
                        };
                        this.cfg.push(
                            block,
                            Statement {
                                source_info,
                                kind: StatementKind::Intrinsic(Box::new(
                                    NonDivergingIntrinsic::Assume(Operand::Move(assert_place)),
                                )),
                            },
                        );
                    }

                    (op, ty)
                } else {
                    // Not an enum: lower the source directly as an operand.
                    let ty = source.ty;
                    let source = unpack!(
                        block = this.as_operand(
                            block,
                            scope,
                            source,
                            LocalInfo::Boring,
                            NeedsTemporary::No
                        )
                    );
                    (source, ty)
                };
                let from_ty = CastTy::from_ty(ty);
                let cast_ty = CastTy::from_ty(expr.ty);
                debug!("ExprKind::Cast from_ty={from_ty:?}, cast_ty={:?}/{cast_ty:?}", expr.ty,);
                let cast_kind = mir_cast_kind(ty, expr.ty);
                block.and(Rvalue::Cast(cast_kind, source, expr.ty))
            }
            ExprKind::PointerCoercion { cast, source } => {
                let source = unpack!(
                    block = this.as_operand(
                        block,
                        scope,
                        &this.thir[source],
                        LocalInfo::Boring,
                        NeedsTemporary::No
                    )
                );
                block.and(Rvalue::Cast(CastKind::PointerCoercion(cast), source, expr.ty))
            }
            ExprKind::Array { ref fields } => {
                // (*) We would (maybe) be closer to codegen if we
                // handled this and other aggregate cases via
                // `into()`, not `as_rvalue` -- in that case, instead
                // of generating
                //
                //     let tmp1 = ...1;
                //     let tmp2 = ...2;
                //     dest = Rvalue::Aggregate(Foo, [tmp1, tmp2])
                //
                // we could just generate
                //
                //     dest.f = ...1;
                //     dest.g = ...2;
                //
                // The problem is that then we would need to:
                //
                // (a) have a more complex mechanism for handling
                //     partial cleanup;
                // (b) distinguish the case where the type `Foo` has a
                //     destructor, in which case creating an instance
                //     as a whole "arms" the destructor, and you can't
                //     write individual fields; and,
                // (c) handle the case where the type Foo has no
                //     fields. We don't want `let x: ();` to compile
                //     to the same MIR as `let x = ();`.

                // first process the set of fields
                let el_ty = expr.ty.sequence_element_type(this.tcx);
                let fields: IndexVec<FieldIdx, _> = fields
                    .into_iter()
                    .copied()
                    .map(|f| {
                        unpack!(
                            block = this.as_operand(
                                block,
                                scope,
                                &this.thir[f],
                                LocalInfo::Boring,
                                NeedsTemporary::Maybe
                            )
                        )
                    })
                    .collect();

                block.and(Rvalue::Aggregate(Box::new(AggregateKind::Array(el_ty)), fields))
            }
            ExprKind::Tuple { ref fields } => {
                // see (*) above
                // first process the set of fields
                let fields: IndexVec<FieldIdx, _> = fields
                    .into_iter()
                    .copied()
                    .map(|f| {
                        unpack!(
                            block = this.as_operand(
                                block,
                                scope,
                                &this.thir[f],
                                LocalInfo::Boring,
                                NeedsTemporary::Maybe
                            )
                        )
                    })
                    .collect();

                block.and(Rvalue::Aggregate(Box::new(AggregateKind::Tuple), fields))
            }
            ExprKind::Closure(box ClosureExpr {
                closure_id,
                args,
                ref upvars,
                movability,
                ref fake_reads,
            }) => {
                // Convert the closure fake reads, if any, from `ExprRef` to mir `Place`
                // and push the fake reads.
                // This must come before creating the operands. This is required in case
                // there is a fake read and a borrow of the same path, since otherwise the
                // fake read might interfere with the borrow. Consider an example like this
                // one:
                // ```
                // let mut x = 0;
                // let c = || {
                //     &mut x; // mutable borrow of `x`
                //     match x { _ => () } // fake read of `x`
                // };
                // ```
                //
                for (thir_place, cause, hir_id) in fake_reads.into_iter() {
                    let place_builder =
                        unpack!(block = this.as_place_builder(block, &this.thir[*thir_place]));

                    if let Some(mir_place) = place_builder.try_to_place(this) {
                        this.cfg.push_fake_read(
                            block,
                            this.source_info(this.tcx.hir().span(*hir_id)),
                            *cause,
                            mir_place,
                        );
                    }
                }

                // see (*) above
                let operands: IndexVec<FieldIdx, _> = upvars
                    .into_iter()
                    .copied()
                    .map(|upvar| {
                        let upvar = &this.thir[upvar];
                        match Category::of(&upvar.kind) {
                            // Use as_place to avoid creating a temporary when
                            // moving a variable into a closure, so that
                            // borrowck knows which variables to mark as being
                            // used as mut. This is OK here because the upvar
                            // expressions have no side effects and act on
                            // disjoint places.
                            // This occurs when capturing by copy/move, while
                            // by reference captures use as_operand
                            Some(Category::Place) => {
                                let place = unpack!(block = this.as_place(block, upvar));
                                this.consume_by_copy_or_move(place)
                            }
                            _ => {
                                // Turn mutable borrow captures into unique
                                // borrow captures when capturing an immutable
                                // variable. This is sound because the mutation
                                // that caused the capture will cause an error.
                                match upvar.kind {
                                    ExprKind::Borrow {
                                        borrow_kind:
                                            BorrowKind::Mut { kind: MutBorrowKind::Default },
                                        arg,
                                    } => unpack!(
                                        block = this.limit_capture_mutability(
                                            upvar.span,
                                            upvar.ty,
                                            scope,
                                            block,
                                            &this.thir[arg],
                                        )
                                    ),
                                    _ => {
                                        unpack!(
                                            block = this.as_operand(
                                                block,
                                                scope,
                                                upvar,
                                                LocalInfo::Boring,
                                                NeedsTemporary::Maybe
                                            )
                                        )
                                    }
                                }
                            }
                        }
                    })
                    .collect();

                let result = match args {
                    UpvarArgs::Coroutine(args) => {
                        // We implicitly set the discriminant to 0. See
                        // librustc_mir/transform/deaggregator.rs for details.
                        let movability = movability.unwrap();
                        Box::new(AggregateKind::Coroutine(closure_id.to_def_id(), args, movability))
                    }
                    UpvarArgs::Closure(args) => {
                        Box::new(AggregateKind::Closure(closure_id.to_def_id(), args))
                    }
                };
                block.and(Rvalue::Aggregate(result, operands))
            }
            ExprKind::Assign { .. } | ExprKind::AssignOp { .. } => {
                // Assignment expressions evaluate to `()`: lower the
                // assignment as a statement and yield a unit constant.
                block = unpack!(this.stmt_expr(block, expr, None));
                block.and(Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
                    span: expr_span,
                    user_ty: None,
                    const_: Const::zero_sized(this.tcx.types.unit),
                }))))
            }

            ExprKind::OffsetOf { container, fields } => {
                block.and(Rvalue::NullaryOp(NullOp::OffsetOf(fields), container))
            }

            // All the constant-like expression kinds lower to a constant
            // operand.
            ExprKind::Literal { .. }
            | ExprKind::NamedConst { .. }
            | ExprKind::NonHirLiteral { .. }
            | ExprKind::ZstLiteral { .. }
            | ExprKind::ConstParam { .. }
            | ExprKind::ConstBlock { .. }
            | ExprKind::StaticRef { .. } => {
                let constant = this.as_constant(expr);
                block.and(Rvalue::Use(Operand::Constant(Box::new(constant))))
            }

            ExprKind::Yield { .. }
            | ExprKind::Block { .. }
            | ExprKind::Match { .. }
            | ExprKind::If { .. }
            | ExprKind::NeverToAny { .. }
            | ExprKind::Use { .. }
            | ExprKind::Borrow { .. }
            | ExprKind::AddressOf { .. }
            | ExprKind::Adt { .. }
            | ExprKind::Loop { .. }
            | ExprKind::LogicalOp { .. }
            | ExprKind::Call { .. }
            | ExprKind::Field { .. }
            | ExprKind::Let { .. }
            | ExprKind::Deref { .. }
            | ExprKind::Index { .. }
            | ExprKind::VarRef { .. }
            | ExprKind::UpvarRef { .. }
            | ExprKind::Break { .. }
            | ExprKind::Continue { .. }
            | ExprKind::Return { .. }
            | ExprKind::Become { .. }
            | ExprKind::InlineAsm { .. }
            | ExprKind::PlaceTypeAscription { .. }
            | ExprKind::ValueTypeAscription { .. } => {
                // these do not have corresponding `Rvalue` variants,
                // so make an operand and then return that
                debug_assert!(!matches!(
                    Category::of(&expr.kind),
                    Some(Category::Rvalue(RvalueFunc::AsRvalue) | Category::Constant)
                ));
                let operand = unpack!(
                    block =
                        this.as_operand(block, scope, expr, LocalInfo::Boring, NeedsTemporary::No)
                );
                block.and(Rvalue::Use(operand))
            }
        }
    }
565
    /// Lowers the binary operation `op` applied to `lhs` and `rhs` of type
    /// `ty`, emitting any runtime checks MIR requires:
    ///
    /// * `+`/`-`/`*` on integers (with `check_overflow` on): a
    ///   `CheckedBinaryOp` plus an overflow assertion;
    /// * `<<`/`>>` on integers (with `check_overflow` on): an assertion that
    ///   the shift amount is less than the bit width of the left operand;
    /// * `/`/`%` on integers (always): a division-by-zero assertion, plus a
    ///   `MIN / -1` overflow assertion for signed types.
    ///
    /// Returns the resulting rvalue together with the block execution
    /// resumes in after the assertions.
    pub(crate) fn build_binary_op(
        &mut self,
        mut block: BasicBlock,
        op: BinOp,
        span: Span,
        ty: Ty<'tcx>,
        lhs: Operand<'tcx>,
        rhs: Operand<'tcx>,
    ) -> BlockAnd<Rvalue<'tcx>> {
        let source_info = self.source_info(span);
        let bool_ty = self.tcx.types.bool;
        let rvalue = match op {
            BinOp::Add | BinOp::Sub | BinOp::Mul if self.check_overflow && ty.is_integral() => {
                // `CheckedBinaryOp` yields a `(result, overflow_flag)` tuple.
                let result_tup = Ty::new_tup(self.tcx, &[ty, bool_ty]);
                let result_value = self.temp(result_tup, span);

                self.cfg.push_assign(
                    block,
                    source_info,
                    result_value,
                    Rvalue::CheckedBinaryOp(op, Box::new((lhs.to_copy(), rhs.to_copy()))),
                );
                let val_fld = FieldIdx::new(0);
                let of_fld = FieldIdx::new(1);

                let tcx = self.tcx;
                let val = tcx.mk_place_field(result_value, val_fld, ty);
                let of = tcx.mk_place_field(result_value, of_fld, bool_ty);

                // Assert the overflow flag is false, then use the value field.
                let err = AssertKind::Overflow(op, lhs, rhs);
                block = self.assert(block, Operand::Move(of), false, err, span);

                Rvalue::Use(Operand::Move(val))
            }
            BinOp::Shl | BinOp::Shr if self.check_overflow && ty.is_integral() => {
                // For an unsigned RHS, the shift is in-range for `rhs < bits`.
                // For a signed RHS, `IntToInt` cast to the equivalent unsigned
                // type and do that same comparison. Because the type is the
                // same size, there's no negative shift amount that ends up
                // overlapping with valid ones, thus it catches negatives too.
                let (lhs_size, _) = ty.int_size_and_signed(self.tcx);
                let rhs_ty = rhs.ty(&self.local_decls, self.tcx);
                let (rhs_size, _) = rhs_ty.int_size_and_signed(self.tcx);

                let (unsigned_rhs, unsigned_ty) = match rhs_ty.kind() {
                    ty::Uint(_) => (rhs.to_copy(), rhs_ty),
                    ty::Int(int_width) => {
                        let uint_ty = Ty::new_uint(self.tcx, int_width.to_unsigned());
                        let rhs_temp = self.temp(uint_ty, span);
                        self.cfg.push_assign(
                            block,
                            source_info,
                            rhs_temp,
                            Rvalue::Cast(CastKind::IntToInt, rhs.to_copy(), uint_ty),
                        );
                        (Operand::Move(rhs_temp), uint_ty)
                    }
                    _ => unreachable!("only integers are shiftable"),
                };

                // This can't overflow because the largest shiftable types are 128-bit,
                // which fits in `u8`, the smallest possible `unsigned_ty`.
                // (And `from_uint` will `bug!` if that's ever no longer true.)
                let lhs_bits = Operand::const_from_scalar(
                    self.tcx,
                    unsigned_ty,
                    Scalar::from_uint(lhs_size.bits(), rhs_size),
                    span,
                );

                // `inbounds = rhs < bits_of(lhs)`; assert it is true.
                let inbounds = self.temp(bool_ty, span);
                self.cfg.push_assign(
                    block,
                    source_info,
                    inbounds,
                    Rvalue::BinaryOp(BinOp::Lt, Box::new((unsigned_rhs, lhs_bits))),
                );

                let overflow_err = AssertKind::Overflow(op, lhs.to_copy(), rhs.to_copy());
                block = self.assert(block, Operand::Move(inbounds), true, overflow_err, span);
                Rvalue::BinaryOp(op, Box::new((lhs, rhs)))
            }
            BinOp::Div | BinOp::Rem if ty.is_integral() => {
                // Checking division and remainder is more complex, since we 1. always check
                // and 2. there are two possible failure cases, divide-by-zero and overflow.

                let zero_err = if op == BinOp::Div {
                    AssertKind::DivisionByZero(lhs.to_copy())
                } else {
                    AssertKind::RemainderByZero(lhs.to_copy())
                };
                let overflow_err = AssertKind::Overflow(op, lhs.to_copy(), rhs.to_copy());

                // Check for / 0
                let is_zero = self.temp(bool_ty, span);
                let zero = self.zero_literal(span, ty);
                self.cfg.push_assign(
                    block,
                    source_info,
                    is_zero,
                    Rvalue::BinaryOp(BinOp::Eq, Box::new((rhs.to_copy(), zero))),
                );

                block = self.assert(block, Operand::Move(is_zero), false, zero_err, span);

                // We only need to check for the overflow in one case:
                // MIN / -1, and only for signed values.
                if ty.is_signed() {
                    let neg_1 = self.neg_1_literal(span, ty);
                    let min = self.minval_literal(span, ty);

                    let is_neg_1 = self.temp(bool_ty, span);
                    let is_min = self.temp(bool_ty, span);
                    let of = self.temp(bool_ty, span);

                    // this does (rhs == -1) & (lhs == MIN). It could short-circuit instead

                    self.cfg.push_assign(
                        block,
                        source_info,
                        is_neg_1,
                        Rvalue::BinaryOp(BinOp::Eq, Box::new((rhs.to_copy(), neg_1))),
                    );
                    self.cfg.push_assign(
                        block,
                        source_info,
                        is_min,
                        Rvalue::BinaryOp(BinOp::Eq, Box::new((lhs.to_copy(), min))),
                    );

                    let is_neg_1 = Operand::Move(is_neg_1);
                    let is_min = Operand::Move(is_min);
                    self.cfg.push_assign(
                        block,
                        source_info,
                        of,
                        Rvalue::BinaryOp(BinOp::BitAnd, Box::new((is_neg_1, is_min))),
                    );

                    block = self.assert(block, Operand::Move(of), false, overflow_err, span);
                }

                Rvalue::BinaryOp(op, Box::new((lhs, rhs)))
            }
            // Everything else needs no checks; emit the plain operation.
            _ => Rvalue::BinaryOp(op, Box::new((lhs, rhs))),
        };
        block.and(rvalue)
    }
714
    /// Builds the MIR for a zero-length repeat expression `[value; 0]`.
    ///
    /// The result is an empty array aggregate, but a non-constant `value`
    /// must still be evaluated for its side effects — and if evaluating it
    /// produced an owned temporary (a `Move` operand), that temporary is
    /// dropped via a `Drop` terminator since it never makes it into the
    /// array.
    fn build_zero_repeat(
        &mut self,
        mut block: BasicBlock,
        value: ExprId,
        scope: Option<region::Scope>,
        outer_source_info: SourceInfo,
    ) -> BlockAnd<Rvalue<'tcx>> {
        let this = self;
        let value = &this.thir[value];
        let elem_ty = value.ty;
        if let Some(Category::Constant) = Category::of(&value.kind) {
            // Repeating a const does nothing
        } else {
            // For a non-const, we may need to generate an appropriate `Drop`
            let value_operand = unpack!(
                block = this.as_operand(block, scope, value, LocalInfo::Boring, NeedsTemporary::No)
            );
            if let Operand::Move(to_drop) = value_operand {
                // Terminate into a fresh block that runs the drop, then
                // resume building there.
                let success = this.cfg.start_new_block();
                this.cfg.terminate(
                    block,
                    outer_source_info,
                    TerminatorKind::Drop {
                        place: to_drop,
                        target: success,
                        unwind: UnwindAction::Continue,
                        replace: false,
                    },
                );
                this.diverge_from(block);
                block = success;
            }
            this.record_operands_moved(&[value_operand]);
        }
        block.and(Rvalue::Aggregate(Box::new(AggregateKind::Array(elem_ty)), IndexVec::new()))
    }
751
    /// Lowers a closure's mutable-borrow capture of `arg` into a temporary,
    /// weakening the borrow kind when the captured place is itself immutable:
    /// an immutable source gets a `MutBorrowKind::ClosureCapture` (unique)
    /// borrow rather than a default mutable borrow. Returns the temporary as
    /// a `Move` operand. (Called from the `ExprKind::Closure` arm of
    /// `as_rvalue`.)
    ///
    /// `temp_lifetime`, when `Some`, is the scope in which the temporary's
    /// drop is scheduled.
    fn limit_capture_mutability(
        &mut self,
        upvar_span: Span,
        upvar_ty: Ty<'tcx>,
        temp_lifetime: Option<region::Scope>,
        mut block: BasicBlock,
        arg: &Expr<'tcx>,
    ) -> BlockAnd<Operand<'tcx>> {
        let this = self;

        let source_info = this.source_info(upvar_span);
        let temp = this.local_decls.push(LocalDecl::new(upvar_ty, upvar_span));

        this.cfg.push(block, Statement { source_info, kind: StatementKind::StorageLive(temp) });

        let arg_place_builder = unpack!(block = this.as_place_builder(block, arg));

        // Determine the mutability of the place being captured.
        let mutability = match arg_place_builder.base() {
            // We are capturing a path that starts off a local variable in the parent.
            // The mutability of the current capture is same as the mutability
            // of the local declaration in the parent.
            PlaceBase::Local(local) => this.local_decls[local].mutability,
            // Parent is a closure and we are capturing a path that is captured
            // by the parent itself. The mutability of the current capture
            // is same as that of the capture in the parent closure.
            PlaceBase::Upvar { .. } => {
                let enclosing_upvars_resolved = arg_place_builder.to_place(this);

                // The capture must project out of the parent closure's
                // capture struct: either `field` or `deref.field`.
                match enclosing_upvars_resolved.as_ref() {
                    PlaceRef {
                        local,
                        projection: &[ProjectionElem::Field(upvar_index, _), ..],
                    }
                    | PlaceRef {
                        local,
                        projection:
                            &[ProjectionElem::Deref, ProjectionElem::Field(upvar_index, _), ..],
                    } => {
                        // Not in a closure
                        debug_assert!(
                            local == ty::CAPTURE_STRUCT_LOCAL,
                            "Expected local to be Local(1), found {local:?}"
                        );
                        // Not in a closure
                        debug_assert!(
                            this.upvars.len() > upvar_index.index(),
                            "Unexpected capture place, upvars={:#?}, upvar_index={:?}",
                            this.upvars,
                            upvar_index
                        );
                        this.upvars[upvar_index.index()].mutability
                    }
                    _ => bug!("Unexpected capture place"),
                }
            }
        };

        // Immutable source => unique (`ClosureCapture`) borrow; mutable
        // source => ordinary mutable borrow.
        let borrow_kind = match mutability {
            Mutability::Not => BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture },
            Mutability::Mut => BorrowKind::Mut { kind: MutBorrowKind::Default },
        };

        let arg_place = arg_place_builder.to_place(this);

        this.cfg.push_assign(
            block,
            source_info,
            Place::from(temp),
            Rvalue::Ref(this.tcx.lifetimes.re_erased, borrow_kind, arg_place),
        );

        // See the comment in `expr_as_temp` and on the `rvalue_scopes` field for why
        // this can be `None`.
        if let Some(temp_lifetime) = temp_lifetime {
            this.schedule_drop_storage_and_value(upvar_span, temp_lifetime, temp);
        }

        block.and(Operand::Move(Place::from(temp)))
    }
831
832 // Helper to get a `-1` value of the appropriate type
833 fn neg_1_literal(&mut self, span: Span, ty: Ty<'tcx>) -> Operand<'tcx> {
834 let param_ty = ty::ParamEnv::empty().and(ty);
835 let size = self.tcx.layout_of(param_ty).unwrap().size;
836 let literal = Const::from_bits(self.tcx, size.unsigned_int_max(), param_ty);
837
838 self.literal_operand(span, literal)
839 }
840
841 // Helper to get the minimum value of the appropriate type
842 fn minval_literal(&mut self, span: Span, ty: Ty<'tcx>) -> Operand<'tcx> {
843 assert!(ty.is_signed());
844 let param_ty = ty::ParamEnv::empty().and(ty);
845 let bits = self.tcx.layout_of(param_ty).unwrap().size.bits();
846 let n = 1 << (bits - 1);
847 let literal = Const::from_bits(self.tcx, n, param_ty);
848
849 self.literal_operand(span, literal)
850 }
851 }