1 //! Global value numbering.
3 //! MIR may contain repeated and/or redundant computations. The objective of this pass is to detect
4 //! such redundancies and re-use the already-computed result when possible.
6 //! In a first pass, we compute a symbolic representation of values that are assigned to SSA
7 //! locals. This symbolic representation is defined by the `Value` enum. Each produced instance of
8 //! `Value` is interned as a `VnIndex`, which allows us to cheaply compute identical values.
10 //! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of available
11 //! values, the locals in which they are stored, and the assignment location.
13 //! In a second pass, we traverse all (non SSA) assignments `x = rvalue` and operands. For each
14 //! one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated to a constant, we
15 //! replace the rvalue/operand by that constant. Otherwise, if there is an SSA local `y`
16 //! associated to this `VnIndex`, and if its definition location strictly dominates the assignment
17 //! to `x`, we replace the assignment by `x = y`.
19 //! Opportunistically, this pass simplifies some `Rvalue`s based on the accumulated knowledge.
21 //! # Operational semantics
23 //! Operationally, this pass attempts to prove bitwise equality between locals. Given this MIR:
25 //! _a = some value // has VnIndex i
27 //! _b = some other value // also has VnIndex i
30 //! We consider it to be replaceable by:
32 //! _a = some value // has VnIndex i
34 //! _c = some other value // also has VnIndex i
35 //! assume(_a bitwise equal to _c) // follows from having the same VnIndex
36 //! _b = _a // follows from the `assume`
39 //! Which is simplifiable to:
41 //! _a = some value // has VnIndex i
46 //! # Handling of references
48 //! We handle references by assigning a different "provenance" index to each Ref/AddressOf rvalue.
49 //! This ensures that we do not spuriously merge borrows that should not be merged. Meanwhile, we
50 //! consider all the derefs of an immutable reference to a freeze type to give the same value:
52 //! _a = *_b // _b is &Freeze
53 //! _c = *_b // replaced by _c = _a
56 //! # Determinism of constant propagation
58 //! When registering a new `Value`, we attempt to opportunistically evaluate it as a constant.
59 //! The evaluated form is inserted in `evaluated` as an `OpTy` or `None` if evaluation failed.
61 //! The difficulty is non-deterministic evaluation of MIR constants. Some `Const` can have
62 //! different runtime values each time they are evaluated. This is the case with
63 //! `Const::Slice` which have a new pointer each time they are evaluated, and constants that
64 //! contain a fn pointer (`AllocId` pointing to a `GlobalAlloc::Function`) pointing to a different
65 //! symbol in each codegen unit.
67 //! Meanwhile, we want to be able to read indirect constants. For instance:
69 //! static A: &'static &'static u8 = &&63;
71 //! **A // We want to replace by 63.
74 //! b"abc"[1] // We want to replace by 'b'.
78 //! The `Value::Constant` variant stores a possibly unevaluated constant. Evaluating that constant
79 //! may be non-deterministic. When that happens, we assign a disambiguator to ensure that we do not
80 //! merge the constants. See `duplicate_slice` test in `gvn.rs`.
82 //! Second, when writing constants in MIR, we do not write `Const::Slice` or `Const`
83 //! that contain `AllocId`s.
85 use rustc_const_eval
::interpret
::{intern_const_alloc_for_constprop, MemoryKind}
;
86 use rustc_const_eval
::interpret
::{ImmTy, InterpCx, OpTy, Projectable, Scalar}
;
87 use rustc_data_structures
::fx
::{FxHashMap, FxIndexSet}
;
88 use rustc_data_structures
::graph
::dominators
::Dominators
;
89 use rustc_hir
::def
::DefKind
;
90 use rustc_index
::bit_set
::BitSet
;
91 use rustc_index
::IndexVec
;
92 use rustc_macros
::newtype_index
;
93 use rustc_middle
::mir
::interpret
::GlobalAlloc
;
94 use rustc_middle
::mir
::visit
::*;
95 use rustc_middle
::mir
::*;
96 use rustc_middle
::ty
::adjustment
::PointerCoercion
;
97 use rustc_middle
::ty
::layout
::LayoutOf
;
98 use rustc_middle
::ty
::{self, Ty, TyCtxt, TypeAndMut}
;
99 use rustc_span
::def_id
::DefId
;
100 use rustc_span
::DUMMY_SP
;
101 use rustc_target
::abi
::{self, Abi, Size, VariantIdx, FIRST_VARIANT}
;
102 use std
::borrow
::Cow
;
104 use crate::dataflow_const_prop
::DummyMachine
;
105 use crate::ssa
::{AssignedValue, SsaLocals}
;
111 impl<'tcx
> MirPass
<'tcx
> for GVN
{
112 fn is_enabled(&self, sess
: &rustc_session
::Session
) -> bool
{
113 sess
.mir_opt_level() >= 4
116 #[instrument(level = "trace", skip(self, tcx, body))]
117 fn run_pass(&self, tcx
: TyCtxt
<'tcx
>, body
: &mut Body
<'tcx
>) {
118 debug
!(def_id
= ?body
.source
.def_id());
119 propagate_ssa(tcx
, body
);
123 fn propagate_ssa
<'tcx
>(tcx
: TyCtxt
<'tcx
>, body
: &mut Body
<'tcx
>) {
124 let param_env
= tcx
.param_env_reveal_all_normalized(body
.source
.def_id());
125 let ssa
= SsaLocals
::new(body
);
126 // Clone dominators as we need them while mutating the body.
127 let dominators
= body
.basic_blocks
.dominators().clone();
129 let mut state
= VnState
::new(tcx
, param_env
, &ssa
, &dominators
, &body
.local_decls
);
130 ssa
.for_each_assignment_mut(
131 body
.basic_blocks
.as_mut_preserves_cfg(),
132 |local
, value
, location
| {
133 let value
= match value
{
134 // We do not know anything of this assigned value.
135 AssignedValue
::Arg
| AssignedValue
::Terminator(_
) => None
,
136 // Try to get some insight.
137 AssignedValue
::Rvalue(rvalue
) => {
138 let value
= state
.simplify_rvalue(rvalue
, location
);
139 // FIXME(#112651) `rvalue` may have a subtype to `local`. We can only mark `local` as
140 // reusable if we have an exact type match.
141 if state
.local_decls
[local
].ty
!= rvalue
.ty(state
.local_decls
, tcx
) {
147 // `next_opaque` is `Some`, so `new_opaque` must return `Some`.
148 let value
= value
.or_else(|| state
.new_opaque()).unwrap();
149 state
.assign(local
, value
);
153 // Stop creating opaques during replacement as it is useless.
154 state
.next_opaque
= None
;
156 let reverse_postorder
= body
.basic_blocks
.reverse_postorder().to_vec();
157 for bb
in reverse_postorder
{
158 let data
= &mut body
.basic_blocks
.as_mut_preserves_cfg()[bb
];
159 state
.visit_basic_block_data(bb
, data
);
162 // For each local that is reused (`y` above), we remove its storage statements to avoid any
163 // difficulty. Those locals are SSA, so should be easy to optimize by LLVM without storage
165 StorageRemover { tcx, reused_locals: state.reused_locals }
.visit_body_preserves_cfg(body
);
172 /// Computing the aggregate's type can be quite slow, so we only keep the minimal amount of
173 /// information to reconstruct it when needed.
174 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
175 enum AggregateTy
<'tcx
> {
176 /// Invariant: this must not be used for an empty array.
179 Def(DefId
, ty
::GenericArgsRef
<'tcx
>),
182 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
188 #[derive(Debug, PartialEq, Eq, Hash)]
191 /// Used to represent values we know nothing about.
192 /// The `usize` is a counter incremented by `new_opaque`.
194 /// Evaluated or unevaluated constant value.
197 /// Some constants do not have a deterministic value. To avoid merging two instances of the
198 /// same `Const`, we assign them an additional integer index.
199 disambiguator
: usize,
201 /// An aggregate value, either tuple/closure/struct/enum.
202 /// This does not contain unions, as we cannot reason with the value.
203 Aggregate(AggregateTy
<'tcx
>, VariantIdx
, Vec
<VnIndex
>),
204 /// This corresponds to a `[value; count]` expression.
205 Repeat(VnIndex
, ty
::Const
<'tcx
>),
206 /// The address of a place.
210 /// Give each borrow and pointer a different provenance, so we don't merge them.
215 /// This is the *value* obtained by projecting another value.
216 Projection(VnIndex
, ProjectionElem
<VnIndex
, Ty
<'tcx
>>),
217 /// Discriminant of the given value.
218 Discriminant(VnIndex
),
219 /// Length of an array or slice.
223 NullaryOp(NullOp
<'tcx
>, Ty
<'tcx
>),
224 UnaryOp(UnOp
, VnIndex
),
225 BinaryOp(BinOp
, VnIndex
, VnIndex
),
226 CheckedBinaryOp(BinOp
, VnIndex
, VnIndex
),
235 struct VnState
<'body
, 'tcx
> {
237 ecx
: InterpCx
<'tcx
, 'tcx
, DummyMachine
>,
238 param_env
: ty
::ParamEnv
<'tcx
>,
239 local_decls
: &'body LocalDecls
<'tcx
>,
240 /// Value stored in each local.
241 locals
: IndexVec
<Local
, Option
<VnIndex
>>,
242 /// First local to be assigned that value.
243 rev_locals
: FxHashMap
<VnIndex
, Vec
<Local
>>,
244 values
: FxIndexSet
<Value
<'tcx
>>,
245 /// Values evaluated as constants if possible.
246 evaluated
: IndexVec
<VnIndex
, Option
<OpTy
<'tcx
>>>,
247 /// Counter to generate different values.
248 /// This is an option to stop creating opaques during replacement.
249 next_opaque
: Option
<usize>,
250 ssa
: &'body SsaLocals
,
251 dominators
: &'body Dominators
<BasicBlock
>,
252 reused_locals
: BitSet
<Local
>,
255 impl<'body
, 'tcx
> VnState
<'body
, 'tcx
> {
258 param_env
: ty
::ParamEnv
<'tcx
>,
259 ssa
: &'body SsaLocals
,
260 dominators
: &'body Dominators
<BasicBlock
>,
261 local_decls
: &'body LocalDecls
<'tcx
>,
265 ecx
: InterpCx
::new(tcx
, DUMMY_SP
, param_env
, DummyMachine
),
268 locals
: IndexVec
::from_elem(None
, local_decls
),
269 rev_locals
: FxHashMap
::default(),
270 values
: FxIndexSet
::default(),
271 evaluated
: IndexVec
::new(),
272 next_opaque
: Some(0),
275 reused_locals
: BitSet
::new_empty(local_decls
.len()),
279 #[instrument(level = "trace", skip(self), ret)]
280 fn insert(&mut self, value
: Value
<'tcx
>) -> VnIndex
{
281 let (index
, new
) = self.values
.insert_full(value
);
282 let index
= VnIndex
::from_usize(index
);
284 let evaluated
= self.eval_to_const(index
);
285 let _index
= self.evaluated
.push(evaluated
);
286 debug_assert_eq
!(index
, _index
);
291 /// Create a new `Value` for which we have no information at all, except that it is distinct
292 /// from all the others.
293 #[instrument(level = "trace", skip(self), ret)]
294 fn new_opaque(&mut self) -> Option
<VnIndex
> {
295 let next_opaque
= self.next_opaque
.as_mut()?
;
296 let value
= Value
::Opaque(*next_opaque
);
298 Some(self.insert(value
))
301 /// Create a new `Value::Address` distinct from all the others.
302 #[instrument(level = "trace", skip(self), ret)]
303 fn new_pointer(&mut self, place
: Place
<'tcx
>, kind
: AddressKind
) -> Option
<VnIndex
> {
304 let next_opaque
= self.next_opaque
.as_mut()?
;
305 let value
= Value
::Address { place, kind, provenance: *next_opaque }
;
307 Some(self.insert(value
))
310 fn get(&self, index
: VnIndex
) -> &Value
<'tcx
> {
311 self.values
.get_index(index
.as_usize()).unwrap()
314 /// Record that `local` is assigned `value`. `local` must be SSA.
315 #[instrument(level = "trace", skip(self))]
316 fn assign(&mut self, local
: Local
, value
: VnIndex
) {
317 self.locals
[local
] = Some(value
);
319 // Only register the value if its type is `Sized`, as we will emit copies of it.
320 let is_sized
= !self.tcx
.features().unsized_locals
321 || self.local_decls
[local
].ty
.is_sized(self.tcx
, self.param_env
);
323 self.rev_locals
.entry(value
).or_default().push(local
);
327 fn insert_constant(&mut self, value
: Const
<'tcx
>) -> Option
<VnIndex
> {
328 let disambiguator
= if value
.is_deterministic() {
329 // The constant is deterministic, no need to disambiguate.
332 // Multiple mentions of this constant will yield different values,
333 // so assign a different `disambiguator` to ensure they do not get the same `VnIndex`.
334 let next_opaque
= self.next_opaque
.as_mut()?
;
335 let disambiguator
= *next_opaque
;
339 Some(self.insert(Value
::Constant { value, disambiguator }
))
342 fn insert_scalar(&mut self, scalar
: Scalar
, ty
: Ty
<'tcx
>) -> VnIndex
{
343 self.insert_constant(Const
::from_scalar(self.tcx
, scalar
, ty
))
344 .expect("scalars are deterministic")
347 #[instrument(level = "trace", skip(self), ret)]
348 fn eval_to_const(&mut self, value
: VnIndex
) -> Option
<OpTy
<'tcx
>> {
350 let op
= match *self.get(value
) {
351 Opaque(_
) => return None
,
352 // Do not bother evaluating repeat expressions. This would uselessly consume memory.
353 Repeat(..) => return None
,
355 Constant { ref value, disambiguator: _ }
=> {
356 self.ecx
.eval_mir_constant(value
, None
, None
).ok()?
358 Aggregate(kind
, variant
, ref fields
) => {
361 .map(|&f
| self.evaluated
[f
].as_ref())
362 .collect
::<Option
<Vec
<_
>>>()?
;
363 let ty
= match kind
{
364 AggregateTy
::Array
=> {
365 assert
!(fields
.len() > 0);
366 Ty
::new_array(self.tcx
, fields
[0].layout
.ty
, fields
.len() as u64)
368 AggregateTy
::Tuple
=> {
369 Ty
::new_tup_from_iter(self.tcx
, fields
.iter().map(|f
| f
.layout
.ty
))
371 AggregateTy
::Def(def_id
, args
) => {
372 self.tcx
.type_of(def_id
).instantiate(self.tcx
, args
)
375 let variant
= if ty
.is_enum() { Some(variant) }
else { None }
;
376 let ty
= self.ecx
.layout_of(ty
).ok()?
;
378 ImmTy
::uninit(ty
).into()
379 } else if matches
!(ty
.abi
, Abi
::Scalar(..) | Abi
::ScalarPair(..)) {
380 let dest
= self.ecx
.allocate(ty
, MemoryKind
::Stack
).ok()?
;
381 let variant_dest
= if let Some(variant
) = variant
{
382 self.ecx
.project_downcast(&dest
, variant
).ok()?
386 for (field_index
, op
) in fields
.into_iter().enumerate() {
387 let field_dest
= self.ecx
.project_field(&variant_dest
, field_index
).ok()?
;
388 self.ecx
.copy_op(op
, &field_dest
, /*allow_transmute*/ false).ok()?
;
390 self.ecx
.write_discriminant(variant
.unwrap_or(FIRST_VARIANT
), &dest
).ok()?
;
391 self.ecx
.alloc_mark_immutable(dest
.ptr().provenance
.unwrap()).ok()?
;
398 Projection(base
, elem
) => {
399 let value
= self.evaluated
[base
].as_ref()?
;
400 let elem
= match elem
{
401 ProjectionElem
::Deref
=> ProjectionElem
::Deref
,
402 ProjectionElem
::Downcast(name
, read_variant
) => {
403 ProjectionElem
::Downcast(name
, read_variant
)
405 ProjectionElem
::Field(f
, ty
) => ProjectionElem
::Field(f
, ty
),
406 ProjectionElem
::ConstantIndex { offset, min_length, from_end }
=> {
407 ProjectionElem
::ConstantIndex { offset, min_length, from_end }
409 ProjectionElem
::Subslice { from, to, from_end }
=> {
410 ProjectionElem
::Subslice { from, to, from_end }
412 ProjectionElem
::OpaqueCast(ty
) => ProjectionElem
::OpaqueCast(ty
),
413 ProjectionElem
::Subtype(ty
) => ProjectionElem
::Subtype(ty
),
414 // This should have been replaced by a `ConstantIndex` earlier.
415 ProjectionElem
::Index(_
) => return None
,
417 self.ecx
.project(value
, elem
).ok()?
419 Address { place, kind, provenance: _ }
=> {
420 if !place
.is_indirect_first_projection() {
423 let local
= self.locals
[place
.local
]?
;
424 let pointer
= self.evaluated
[local
].as_ref()?
;
425 let mut mplace
= self.ecx
.deref_pointer(pointer
).ok()?
;
426 for proj
in place
.projection
.iter().skip(1) {
427 // We have no call stack to associate a local with a value, so we cannot interpret indexing.
428 if matches
!(proj
, ProjectionElem
::Index(_
)) {
431 mplace
= self.ecx
.project(&mplace
, proj
).ok()?
;
433 let pointer
= mplace
.to_ref(&self.ecx
);
434 let ty
= match kind
{
435 AddressKind
::Ref(bk
) => Ty
::new_ref(
437 self.tcx
.lifetimes
.re_erased
,
438 ty
::TypeAndMut { ty: mplace.layout.ty, mutbl: bk.to_mutbl_lossy() }
,
440 AddressKind
::Address(mutbl
) => {
441 Ty
::new_ptr(self.tcx
, TypeAndMut { ty: mplace.layout.ty, mutbl }
)
444 let layout
= self.ecx
.layout_of(ty
).ok()?
;
445 ImmTy
::from_immediate(pointer
, layout
).into()
448 Discriminant(base
) => {
449 let base
= self.evaluated
[base
].as_ref()?
;
450 let variant
= self.ecx
.read_discriminant(base
).ok()?
;
452 self.ecx
.discriminant_for_variant(base
.layout
.ty
, variant
).ok()?
;
456 let slice
= self.evaluated
[slice
].as_ref()?
;
457 let usize_layout
= self.ecx
.layout_of(self.tcx
.types
.usize).unwrap();
458 let len
= slice
.len(&self.ecx
).ok()?
;
459 let imm
= ImmTy
::try_from_uint(len
, usize_layout
)?
;
462 NullaryOp(null_op
, ty
) => {
463 let layout
= self.ecx
.layout_of(ty
).ok()?
;
464 if let NullOp
::SizeOf
| NullOp
::AlignOf
= null_op
&& layout
.is_unsized() {
467 let val
= match null_op
{
468 NullOp
::SizeOf
=> layout
.size
.bytes(),
469 NullOp
::AlignOf
=> layout
.align
.abi
.bytes(),
470 NullOp
::OffsetOf(fields
) => {
471 layout
.offset_of_subfield(&self.ecx
, fields
.iter()).bytes()
474 let usize_layout
= self.ecx
.layout_of(self.tcx
.types
.usize).unwrap();
475 let imm
= ImmTy
::try_from_uint(val
, usize_layout
)?
;
478 UnaryOp(un_op
, operand
) => {
479 let operand
= self.evaluated
[operand
].as_ref()?
;
480 let operand
= self.ecx
.read_immediate(operand
).ok()?
;
481 let (val
, _
) = self.ecx
.overflowing_unary_op(un_op
, &operand
).ok()?
;
484 BinaryOp(bin_op
, lhs
, rhs
) => {
485 let lhs
= self.evaluated
[lhs
].as_ref()?
;
486 let lhs
= self.ecx
.read_immediate(lhs
).ok()?
;
487 let rhs
= self.evaluated
[rhs
].as_ref()?
;
488 let rhs
= self.ecx
.read_immediate(rhs
).ok()?
;
489 let (val
, _
) = self.ecx
.overflowing_binary_op(bin_op
, &lhs
, &rhs
).ok()?
;
492 CheckedBinaryOp(bin_op
, lhs
, rhs
) => {
493 let lhs
= self.evaluated
[lhs
].as_ref()?
;
494 let lhs
= self.ecx
.read_immediate(lhs
).ok()?
;
495 let rhs
= self.evaluated
[rhs
].as_ref()?
;
496 let rhs
= self.ecx
.read_immediate(rhs
).ok()?
;
497 let (val
, overflowed
) = self.ecx
.overflowing_binary_op(bin_op
, &lhs
, &rhs
).ok()?
;
498 let tuple
= Ty
::new_tup_from_iter(
500 [val
.layout
.ty
, self.tcx
.types
.bool
].into_iter(),
502 let tuple
= self.ecx
.layout_of(tuple
).ok()?
;
503 ImmTy
::from_scalar_pair(val
.to_scalar(), Scalar
::from_bool(overflowed
), tuple
)
506 Cast { kind, value, from: _, to }
=> match kind
{
507 CastKind
::IntToInt
| CastKind
::IntToFloat
=> {
508 let value
= self.evaluated
[value
].as_ref()?
;
509 let value
= self.ecx
.read_immediate(value
).ok()?
;
510 let to
= self.ecx
.layout_of(to
).ok()?
;
511 let res
= self.ecx
.int_to_int_or_float(&value
, to
).ok()?
;
514 CastKind
::FloatToFloat
| CastKind
::FloatToInt
=> {
515 let value
= self.evaluated
[value
].as_ref()?
;
516 let value
= self.ecx
.read_immediate(value
).ok()?
;
517 let to
= self.ecx
.layout_of(to
).ok()?
;
518 let res
= self.ecx
.float_to_float_or_int(&value
, to
).ok()?
;
521 CastKind
::Transmute
=> {
522 let value
= self.evaluated
[value
].as_ref()?
;
523 let to
= self.ecx
.layout_of(to
).ok()?
;
524 // `offset` for immediates only supports scalar/scalar-pair ABIs,
525 // so bail out if the target is not one.
526 if value
.as_mplace_or_imm().is_right() {
527 match (value
.layout
.abi
, to
.abi
) {
528 (Abi
::Scalar(..), Abi
::Scalar(..)) => {}
529 (Abi
::ScalarPair(..), Abi
::ScalarPair(..)) => {}
533 value
.offset(Size
::ZERO
, to
, &self.ecx
).ok()?
543 place
: PlaceRef
<'tcx
>,
545 proj
: PlaceElem
<'tcx
>,
546 ) -> Option
<VnIndex
> {
547 let proj
= match proj
{
548 ProjectionElem
::Deref
=> {
549 let ty
= place
.ty(self.local_decls
, self.tcx
).ty
;
550 if let Some(Mutability
::Not
) = ty
.ref_mutability()
551 && let Some(pointee_ty
) = ty
.builtin_deref(true)
552 && pointee_ty
.ty
.is_freeze(self.tcx
, self.param_env
)
554 // An immutable borrow `_x` always points to the same value for the
555 // lifetime of the borrow, so we can merge all instances of `*_x`.
556 ProjectionElem
::Deref
561 ProjectionElem
::Downcast(name
, index
) => ProjectionElem
::Downcast(name
, index
),
562 ProjectionElem
::Field(f
, ty
) => {
563 if let Value
::Aggregate(_
, _
, fields
) = self.get(value
) {
564 return Some(fields
[f
.as_usize()]);
565 } else if let Value
::Projection(outer_value
, ProjectionElem
::Downcast(_
, read_variant
)) = self.get(value
)
566 && let Value
::Aggregate(_
, written_variant
, fields
) = self.get(*outer_value
)
567 // This pass is not aware of control-flow, so we do not know whether the
568 // replacement we are doing is actually reachable. We could be in any arm of
571 // Some(y) => /* stuff */,
572 // None => /* other */,
576 // In surface rust, the current statement would be unreachable.
578 // However, from the reference chapter on enums and RFC 2195,
579 // accessing the wrong variant is not UB if the enum has repr.
580 // So it's not impossible for a series of MIR opts to generate
581 // a downcast to an inactive variant.
582 && written_variant
== read_variant
584 return Some(fields
[f
.as_usize()]);
586 ProjectionElem
::Field(f
, ty
)
588 ProjectionElem
::Index(idx
) => {
589 if let Value
::Repeat(inner
, _
) = self.get(value
) {
592 let idx
= self.locals
[idx
]?
;
593 ProjectionElem
::Index(idx
)
595 ProjectionElem
::ConstantIndex { offset, min_length, from_end }
=> {
596 match self.get(value
) {
597 Value
::Repeat(inner
, _
) => {
600 Value
::Aggregate(AggregateTy
::Array
, _
, operands
) => {
601 let offset
= if from_end
{
602 operands
.len() - offset
as usize
606 return operands
.get(offset
).copied();
610 ProjectionElem
::ConstantIndex { offset, min_length, from_end }
612 ProjectionElem
::Subslice { from, to, from_end }
=> {
613 ProjectionElem
::Subslice { from, to, from_end }
615 ProjectionElem
::OpaqueCast(ty
) => ProjectionElem
::OpaqueCast(ty
),
616 ProjectionElem
::Subtype(ty
) => ProjectionElem
::Subtype(ty
),
619 Some(self.insert(Value
::Projection(value
, proj
)))
622 /// Simplify the projection chain if we know better.
623 #[instrument(level = "trace", skip(self))]
624 fn simplify_place_projection(&mut self, place
: &mut Place
<'tcx
>, location
: Location
) {
625 // If the projection is indirect, we treat the local as a value, so can replace it with
627 if place
.is_indirect()
628 && let Some(base
) = self.locals
[place
.local
]
629 && let Some(new_local
) = self.try_as_local(base
, location
)
631 place
.local
= new_local
;
632 self.reused_locals
.insert(new_local
);
635 let mut projection
= Cow
::Borrowed(&place
.projection
[..]);
637 for i
in 0..projection
.len() {
638 let elem
= projection
[i
];
639 if let ProjectionElem
::Index(idx
) = elem
640 && let Some(idx
) = self.locals
[idx
]
642 if let Some(offset
) = self.evaluated
[idx
].as_ref()
643 && let Ok(offset
) = self.ecx
.read_target_usize(offset
)
645 projection
.to_mut()[i
] = ProjectionElem
::ConstantIndex
{
647 min_length
: offset
+ 1,
650 } else if let Some(new_idx
) = self.try_as_local(idx
, location
) {
651 projection
.to_mut()[i
] = ProjectionElem
::Index(new_idx
);
652 self.reused_locals
.insert(new_idx
);
657 if projection
.is_owned() {
658 place
.projection
= self.tcx
.mk_place_elems(&projection
);
664 /// Represent the *value* which would be read from `place`, and point `place` to a preexisting
665 /// place with the same value (if that already exists).
666 #[instrument(level = "trace", skip(self), ret)]
667 fn simplify_place_value(
669 place
: &mut Place
<'tcx
>,
671 ) -> Option
<VnIndex
> {
672 self.simplify_place_projection(place
, location
);
674 // Invariant: `place` and `place_ref` point to the same value, even if they point to
675 // different memory locations.
676 let mut place_ref
= place
.as_ref();
678 // Invariant: `value` holds the value up-to the `index`th projection excluded.
679 let mut value
= self.locals
[place
.local
]?
;
680 for (index
, proj
) in place
.projection
.iter().enumerate() {
681 if let Some(local
) = self.try_as_local(value
, location
) {
682 // Both `local` and `Place { local: place.local, projection: projection[..index] }`
683 // hold the same value. Therefore, following place holds the value in the original
685 place_ref
= PlaceRef { local, projection: &place.projection[index..] }
;
688 let base
= PlaceRef { local: place.local, projection: &place.projection[..index] }
;
689 value
= self.project(base
, value
, proj
)?
;
692 if let Some(new_local
) = self.try_as_local(value
, location
) {
693 place_ref
= PlaceRef { local: new_local, projection: &[] }
;
696 if place_ref
.local
!= place
.local
|| place_ref
.projection
.len() < place
.projection
.len() {
697 // By the invariant on `place_ref`.
698 *place
= place_ref
.project_deeper(&[], self.tcx
);
699 self.reused_locals
.insert(place_ref
.local
);
705 #[instrument(level = "trace", skip(self), ret)]
708 operand
: &mut Operand
<'tcx
>,
710 ) -> Option
<VnIndex
> {
712 Operand
::Constant(ref mut constant
) => {
713 let const_
= constant
.const_
.normalize(self.tcx
, self.param_env
);
714 self.insert_constant(const_
)
716 Operand
::Copy(ref mut place
) | Operand
::Move(ref mut place
) => {
717 let value
= self.simplify_place_value(place
, location
)?
;
718 if let Some(const_
) = self.try_as_constant(value
) {
719 *operand
= Operand
::Constant(Box
::new(const_
));
726 #[instrument(level = "trace", skip(self), ret)]
729 rvalue
: &mut Rvalue
<'tcx
>,
731 ) -> Option
<VnIndex
> {
732 let value
= match *rvalue
{
734 Rvalue
::Use(ref mut operand
) => return self.simplify_operand(operand
, location
),
735 Rvalue
::CopyForDeref(place
) => {
736 let mut operand
= Operand
::Copy(place
);
737 let val
= self.simplify_operand(&mut operand
, location
);
738 *rvalue
= Rvalue
::Use(operand
);
743 Rvalue
::Repeat(ref mut op
, amount
) => {
744 let op
= self.simplify_operand(op
, location
)?
;
745 Value
::Repeat(op
, amount
)
747 Rvalue
::NullaryOp(op
, ty
) => Value
::NullaryOp(op
, ty
),
748 Rvalue
::Aggregate(..) => return self.simplify_aggregate(rvalue
, location
),
749 Rvalue
::Ref(_
, borrow_kind
, ref mut place
) => {
750 self.simplify_place_projection(place
, location
);
751 return self.new_pointer(*place
, AddressKind
::Ref(borrow_kind
));
753 Rvalue
::AddressOf(mutbl
, ref mut place
) => {
754 self.simplify_place_projection(place
, location
);
755 return self.new_pointer(*place
, AddressKind
::Address(mutbl
));
759 Rvalue
::Len(ref mut place
) => {
760 let place
= self.simplify_place_value(place
, location
)?
;
763 Rvalue
::Cast(kind
, ref mut value
, to
) => {
764 let from
= value
.ty(self.local_decls
, self.tcx
);
765 let value
= self.simplify_operand(value
, location
)?
;
766 if let CastKind
::PointerCoercion(
767 PointerCoercion
::ReifyFnPointer
| PointerCoercion
::ClosureFnPointer(_
),
770 // Each reification of a generic fn may get a different pointer.
771 // Do not try to merge them.
772 return self.new_opaque();
774 Value
::Cast { kind, value, from, to }
776 Rvalue
::BinaryOp(op
, box (ref mut lhs
, ref mut rhs
)) => {
777 let lhs
= self.simplify_operand(lhs
, location
);
778 let rhs
= self.simplify_operand(rhs
, location
);
779 Value
::BinaryOp(op
, lhs?
, rhs?
)
781 Rvalue
::CheckedBinaryOp(op
, box (ref mut lhs
, ref mut rhs
)) => {
782 let lhs
= self.simplify_operand(lhs
, location
);
783 let rhs
= self.simplify_operand(rhs
, location
);
784 Value
::CheckedBinaryOp(op
, lhs?
, rhs?
)
786 Rvalue
::UnaryOp(op
, ref mut arg
) => {
787 let arg
= self.simplify_operand(arg
, location
)?
;
788 Value
::UnaryOp(op
, arg
)
790 Rvalue
::Discriminant(ref mut place
) => {
791 let place
= self.simplify_place_value(place
, location
)?
;
792 if let Some(discr
) = self.simplify_discriminant(place
) {
795 Value
::Discriminant(place
)
798 // Unsupported values.
799 Rvalue
::ThreadLocalRef(..) | Rvalue
::ShallowInitBox(..) => return None
,
802 Some(self.insert(value
))
805 fn simplify_discriminant(&mut self, place
: VnIndex
) -> Option
<VnIndex
> {
806 if let Value
::Aggregate(enum_ty
, variant
, _
) = *self.get(place
)
807 && let AggregateTy
::Def(enum_did
, enum_substs
) = enum_ty
808 && let DefKind
::Enum
= self.tcx
.def_kind(enum_did
)
810 let enum_ty
= self.tcx
.type_of(enum_did
).instantiate(self.tcx
, enum_substs
);
811 let discr
= self.ecx
.discriminant_for_variant(enum_ty
, variant
).ok()?
;
812 return Some(self.insert_scalar(discr
.to_scalar(), discr
.layout
.ty
));
818 fn simplify_aggregate(
820 rvalue
: &mut Rvalue
<'tcx
>,
822 ) -> Option
<VnIndex
> {
823 let Rvalue
::Aggregate(box ref kind
, ref mut fields
) = *rvalue
else { bug!() }
;
826 if fields
.is_empty() {
827 let is_zst
= match *kind
{
828 AggregateKind
::Array(..) | AggregateKind
::Tuple
| AggregateKind
::Closure(..) => {
831 // Only enums can be non-ZST.
832 AggregateKind
::Adt(did
, ..) => tcx
.def_kind(did
) != DefKind
::Enum
,
833 // Coroutines are never ZST, as they at least contain the implicit states.
834 AggregateKind
::Coroutine(..) => false,
838 let ty
= rvalue
.ty(self.local_decls
, tcx
);
839 return self.insert_constant(Const
::zero_sized(ty
));
843 let (ty
, variant_index
) = match *kind
{
844 AggregateKind
::Array(..) => {
845 assert
!(!fields
.is_empty());
846 (AggregateTy
::Array
, FIRST_VARIANT
)
848 AggregateKind
::Tuple
=> {
849 assert
!(!fields
.is_empty());
850 (AggregateTy
::Tuple
, FIRST_VARIANT
)
852 AggregateKind
::Closure(did
, substs
) | AggregateKind
::Coroutine(did
, substs
, _
) => {
853 (AggregateTy
::Def(did
, substs
), FIRST_VARIANT
)
855 AggregateKind
::Adt(did
, variant_index
, substs
, _
, None
) => {
856 (AggregateTy
::Def(did
, substs
), variant_index
)
858 // Do not track unions.
859 AggregateKind
::Adt(_
, _
, _
, _
, Some(_
)) => return None
,
862 let fields
: Option
<Vec
<_
>> = fields
864 .map(|op
| self.simplify_operand(op
, location
).or_else(|| self.new_opaque()))
866 let fields
= fields?
;
868 if let AggregateTy
::Array
= ty
&& fields
.len() > 4 {
869 let first
= fields
[0];
870 if fields
.iter().all(|&v
| v
== first
) {
871 let len
= ty
::Const
::from_target_usize(self.tcx
, fields
.len().try_into().unwrap());
872 if let Some(const_
) = self.try_as_constant(first
) {
873 *rvalue
= Rvalue
::Repeat(Operand
::Constant(Box
::new(const_
)), len
);
874 } else if let Some(local
) = self.try_as_local(first
, location
) {
875 *rvalue
= Rvalue
::Repeat(Operand
::Copy(local
.into()), len
);
876 self.reused_locals
.insert(local
);
878 return Some(self.insert(Value
::Repeat(first
, len
)));
882 Some(self.insert(Value
::Aggregate(ty
, variant_index
, fields
)))
886 fn op_to_prop_const
<'tcx
>(
887 ecx
: &mut InterpCx
<'_
, 'tcx
, DummyMachine
>,
889 ) -> Option
<ConstValue
<'tcx
>> {
890 // Do not attempt to propagate unsized locals.
891 if op
.layout
.is_unsized() {
895 // This constant is a ZST, just return an empty value.
896 if op
.layout
.is_zst() {
897 return Some(ConstValue
::ZeroSized
);
900 // Do not synthesize too large constants. Codegen will just memcpy them, which we'd like to avoid.
901 if !matches
!(op
.layout
.abi
, Abi
::Scalar(..) | Abi
::ScalarPair(..)) {
905 // If this constant has scalar ABI, return it as a `ConstValue::Scalar`.
906 if let Abi
::Scalar(abi
::Scalar
::Initialized { .. }
) = op
.layout
.abi
907 && let Ok(scalar
) = ecx
.read_scalar(op
)
908 && scalar
.try_to_int().is_ok()
910 return Some(ConstValue
::Scalar(scalar
));
913 // If this constant is already represented as an `Allocation`,
914 // try putting it into global memory to return it.
915 if let Either
::Left(mplace
) = op
.as_mplace_or_imm() {
916 let (size
, _align
) = ecx
.size_and_align_of_mplace(&mplace
).ok()??
;
918 // Do not try interning a value that contains provenance.
919 // Due to https://github.com/rust-lang/rust/issues/79738, doing so could lead to bugs.
920 // FIXME: remove this hack once that issue is fixed.
921 let alloc_ref
= ecx
.get_ptr_alloc(mplace
.ptr(), size
).ok()??
;
922 if alloc_ref
.has_provenance() {
926 let pointer
= mplace
.ptr().into_pointer_or_addr().ok()?
;
927 let (alloc_id
, offset
) = pointer
.into_parts();
928 intern_const_alloc_for_constprop(ecx
, alloc_id
).ok()?
;
929 if matches
!(ecx
.tcx
.global_alloc(alloc_id
), GlobalAlloc
::Memory(_
)) {
930 // `alloc_id` may point to a static. Codegen will choke on an `Indirect` with anything
931 // but `GlobalAlloc::Memory`, so do fall through to copying if needed.
932 // FIXME: find a way to treat this more uniformly
933 // (probably by fixing codegen)
934 return Some(ConstValue
::Indirect { alloc_id, offset }
);
938 // Everything failed: create a new allocation to hold the data.
940 ecx
.intern_with_temp_alloc(op
.layout
, |ecx
, dest
| ecx
.copy_op(op
, dest
, false)).ok()?
;
941 let value
= ConstValue
::Indirect { alloc_id, offset: Size::ZERO }
;
943 // Check that we do not leak a pointer.
944 // Those pointers may lose part of their identity in codegen.
945 // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
946 if ecx
.tcx
.global_alloc(alloc_id
).unwrap_memory().inner().provenance().ptrs().is_empty() {
953 impl<'tcx
> VnState
<'_
, 'tcx
> {
954 /// If `index` is a `Value::Constant`, return the `Constant` to be put in the MIR.
955 fn try_as_constant(&mut self, index
: VnIndex
) -> Option
<ConstOperand
<'tcx
>> {
956 // This was already constant in MIR, do not change it.
957 if let Value
::Constant { value, disambiguator: _ }
= *self.get(index
)
958 // If the constant is not deterministic, adding an additional mention of it in MIR will
959 // not give the same value as the former mention.
960 && value
.is_deterministic()
962 return Some(ConstOperand { span: rustc_span::DUMMY_SP, user_ty: None, const_: value }
);
965 let op
= self.evaluated
[index
].as_ref()?
;
966 if op
.layout
.is_unsized() {
967 // Do not attempt to propagate unsized locals.
971 let value
= op_to_prop_const(&mut self.ecx
, op
)?
;
973 // Check that we do not leak a pointer.
974 // Those pointers may lose part of their identity in codegen.
975 // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
976 assert
!(!value
.may_have_provenance(self.tcx
, op
.layout
.size
));
978 let const_
= Const
::Val(value
, op
.layout
.ty
);
979 Some(ConstOperand { span: rustc_span::DUMMY_SP, user_ty: None, const_ }
)
982 /// If there is a local which is assigned `index`, and its assignment strictly dominates `loc`,
984 fn try_as_local(&mut self, index
: VnIndex
, loc
: Location
) -> Option
<Local
> {
985 let other
= self.rev_locals
.get(&index
)?
;
989 .find(|&other
| self.ssa
.assignment_dominates(self.dominators
, other
, loc
))
993 impl<'tcx
> MutVisitor
<'tcx
> for VnState
<'_
, 'tcx
> {
994 fn tcx(&self) -> TyCtxt
<'tcx
> {
998 fn visit_place(&mut self, place
: &mut Place
<'tcx
>, _
: PlaceContext
, location
: Location
) {
999 self.simplify_place_projection(place
, location
);
1002 fn visit_operand(&mut self, operand
: &mut Operand
<'tcx
>, location
: Location
) {
1003 self.simplify_operand(operand
, location
);
1006 fn visit_statement(&mut self, stmt
: &mut Statement
<'tcx
>, location
: Location
) {
1007 if let StatementKind
::Assign(box (_
, ref mut rvalue
)) = stmt
.kind
1008 // Do not try to simplify a constant, it's already in canonical shape.
1009 && !matches
!(rvalue
, Rvalue
::Use(Operand
::Constant(_
)))
1011 if let Some(value
) = self.simplify_rvalue(rvalue
, location
)
1013 if let Some(const_
) = self.try_as_constant(value
) {
1014 *rvalue
= Rvalue
::Use(Operand
::Constant(Box
::new(const_
)));
1015 } else if let Some(local
) = self.try_as_local(value
, location
)
1016 && *rvalue
!= Rvalue
::Use(Operand
::Move(local
.into()))
1018 *rvalue
= Rvalue
::Use(Operand
::Copy(local
.into()));
1019 self.reused_locals
.insert(local
);
1023 self.super_statement(stmt
, location
);
1028 struct StorageRemover
<'tcx
> {
1030 reused_locals
: BitSet
<Local
>,
1033 impl<'tcx
> MutVisitor
<'tcx
> for StorageRemover
<'tcx
> {
1034 fn tcx(&self) -> TyCtxt
<'tcx
> {
1038 fn visit_operand(&mut self, operand
: &mut Operand
<'tcx
>, _
: Location
) {
1039 if let Operand
::Move(place
) = *operand
1040 && let Some(local
) = place
.as_local()
1041 && self.reused_locals
.contains(local
)
1043 *operand
= Operand
::Copy(place
);
1047 fn visit_statement(&mut self, stmt
: &mut Statement
<'tcx
>, loc
: Location
) {
1049 // When removing storage statements, we need to remove both (#107511).
1050 StatementKind
::StorageLive(l
) | StatementKind
::StorageDead(l
)
1051 if self.reused_locals
.contains(l
) =>
1055 _
=> self.super_statement(stmt
, loc
),