1 //! A constant propagation optimization pass based on dataflow analysis.
3 //! Currently, this pass only propagates scalar values.
5 use rustc_const_eval
::const_eval
::CheckAlignment
;
6 use rustc_const_eval
::interpret
::{ConstValue, ImmTy, Immediate, InterpCx, Scalar}
;
7 use rustc_data_structures
::fx
::FxHashMap
;
8 use rustc_hir
::def
::DefKind
;
9 use rustc_middle
::mir
::visit
::{MutVisitor, Visitor}
;
10 use rustc_middle
::mir
::*;
11 use rustc_middle
::ty
::layout
::TyAndLayout
;
12 use rustc_middle
::ty
::{self, Ty, TyCtxt}
;
13 use rustc_mir_dataflow
::value_analysis
::{Map, State, TrackElem, ValueAnalysis, ValueOrPlace}
;
14 use rustc_mir_dataflow
::{lattice::FlatSet, Analysis, ResultsVisitor, SwitchIntEdgeEffects}
;
15 use rustc_span
::DUMMY_SP
;
16 use rustc_target
::abi
::{Align, FieldIdx, VariantIdx}
;
// These constants are somewhat random guesses and have not been optimized.
// If `tcx.sess.mir_opt_level() >= 4`, we ignore the limits (this can become very expensive).
const BLOCK_LIMIT: usize = 100;
const PLACE_LIMIT: usize = 100;
/// A constant propagation optimization pass based on dataflow analysis;
/// currently it only propagates scalar values.
pub struct DataflowConstProp;
impl<'tcx> MirPass<'tcx> for DataflowConstProp {
    /// This pass only runs at `mir-opt-level >= 3`.
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        sess.mir_opt_level() >= 3
        // NOTE(review): the closing braces of this method are missing from this extraction.

    #[instrument(skip_all level = "debug")]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!(def_id = ?body.source.def_id());
        // Bail out on very large bodies unless the opt level disables the limits.
        if tcx.sess.mir_opt_level() < 4 && body.basic_blocks.len() > BLOCK_LIMIT {
            debug!("aborted dataflow const prop due too many basic blocks");
            // NOTE(review): the early-return and closing-brace lines of this guard
            // are missing from this extraction.

        // We want to have a somewhat linear runtime w.r.t. the number of statements/terminators.
        // Let's call this number `n`. Dataflow analysis has `O(h*n)` transfer function
        // applications, where `h` is the height of the lattice. Because the height of our lattice
        // is linear w.r.t. the number of tracked places, this is `O(tracked_places * n)`. However,
        // because every transfer function application could traverse the whole map, this becomes
        // `O(num_nodes * tracked_places * n)` in terms of time complexity. Since the number of
        // map nodes is strongly correlated to the number of tracked places, this becomes more or
        // less `O(n)` if we place a constant limit on the number of tracked places.
        let place_limit = if tcx.sess.mir_opt_level() < 4 { Some(PLACE_LIMIT) } else { None };

        // Decide which places to track during the analysis.
        let map = Map::from_filter(tcx, body, Ty::is_scalar, place_limit);

        // Perform the actual dataflow analysis.
        let analysis = ConstAnalysis::new(tcx, body, map);
        let results = debug_span!("analyze")
            .in_scope(|| analysis.wrap().into_engine(tcx, body).iterate_to_fixpoint());

        // Collect results and patch the body afterwards.
        let mut visitor = CollectAndPatch::new(tcx, &results.analysis.0.map);
        debug_span!("collect").in_scope(|| results.visit_reachable_with(body, &mut visitor));
        debug_span!("patch").in_scope(|| visitor.visit_body(body));
        // NOTE(review): the closing braces of `run_pass` and of this `impl` are
        // missing from this extraction.
/// The dataflow value analysis driving this pass. It evaluates constant
/// operations with a const-eval interpreter running the no-op `DummyMachine`.
struct ConstAnalysis<'a, 'tcx> {
    // NOTE(review): the leading fields (apparently including the place `map` and
    // `tcx`, which methods below read) are missing from this extraction.
    local_decls: &'a LocalDecls<'tcx>,
    // Interpreter used to fold casts and binary/unary ops on known scalars.
    ecx: InterpCx<'tcx, 'tcx, DummyMachine>,
    param_env: ty::ParamEnv<'tcx>,
impl<'tcx> ConstAnalysis<'_, 'tcx> {
    /// Evaluates the discriminant written for `variant_index` of an enum type
    /// as a typed scalar; yields `None` when it cannot be computed.
    // NOTE(review): the `fn eval_discriminant(...)` signature opening (receiver
    // and the enum-type parameter) is missing from this extraction.
        variant_index: VariantIdx,
    ) -> Option<ScalarTy<'tcx>> {
        // Discriminants only exist on enums.
        if !enum_ty.is_enum() {
            // NOTE(review): the body of this guard (presumably `return None`) and
            // its closing brace are missing from this extraction.
        let discr = enum_ty.discriminant_for_variant(self.tcx, variant_index)?;
        // The layout supplies the scalar's size for the conversion below.
        let discr_layout = self.tcx.layout_of(self.param_env.and(discr.ty)).ok()?;
        let discr_value = Scalar::try_from_uint(discr.val, discr_layout.size)?;
        Some(ScalarTy(discr_value, discr.ty))
impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
    // The lattice: a flat lattice over typed scalar constants.
    type Value = FlatSet<ScalarTy<'tcx>>;

    const NAME: &'static str = "ConstAnalysis";

    /// Returns the place map used by the analysis framework.
    fn map(&self) -> &Map {
        // NOTE(review): the body and closing brace of `map` are missing from
        // this extraction.
    /// Transfer function for statements; only `SetDiscriminant` gets special
    /// treatment, everything else defers to the default handling.
    fn handle_statement(&self, statement: &Statement<'tcx>, state: &mut State<Self::Value>) {
        match statement.kind {
            StatementKind::SetDiscriminant { box ref place, variant_index } => {
                // Forget anything previously known about the discriminant...
                state.flood_discr(place.as_ref(), &self.map);
                // ...and, if it is tracked, store the freshly computed value.
                if self.map.find_discr(place.as_ref()).is_some() {
                    let enum_ty = place.ty(self.local_decls, self.tcx).ty;
                    if let Some(discr) = self.eval_discriminant(enum_ty, variant_index) {
                        // NOTE(review): the enclosing call around this argument
                        // (and the following lines) are missing from this extraction.
                        ValueOrPlace::Value(FlatSet::Elem(discr)),
            // All other statements use the generic handling.
            _ => self.super_statement(statement, state),
// NOTE(review): the opening of the `handle_assign` signature (receiver and the
// `target` place parameter) and the `match` header are missing from this
// extraction; only the trailing parameters and match arms are visible.
        rvalue: &Rvalue<'tcx>,
        state: &mut State<Self::Value>,
            Rvalue::Aggregate(kind, operands) => {
                // If we assign `target = Enum::Variant#0(operand)`,
                // we must make sure that all `target as Variant#i` are `Top`.
                state.flood(target.as_ref(), self.map());

                if let Some(target_idx) = self.map().find(target.as_ref()) {
                    // Determine which (sub-)place receives the fields and which
                    // variant discriminant (if any) must be written afterwards.
                    let (variant_target, variant_index) = match **kind {
                        AggregateKind::Tuple | AggregateKind::Closure(..) => {
                            (Some(target_idx), None)
                        AggregateKind::Adt(def_id, variant_index, ..) => {
                            match self.tcx.def_kind(def_id) {
                                DefKind::Struct => (Some(target_idx), None),
                                // NOTE(review): lines are missing around here
                                // (apparently the enum arm wrapping this expression).
                                self.map.apply(target_idx, TrackElem::Variant(variant_index)),
                    // Propagate each operand's value into its tracked field.
                    if let Some(variant_target_idx) = variant_target {
                        for (field_index, operand) in operands.iter().enumerate() {
                            if let Some(field) = self.map().apply(
                                // NOTE(review): the base-index argument line is
                                // missing from this extraction.
                                TrackElem::Field(FieldIdx::from_usize(field_index)),
                                let result = self.handle_operand(operand, state);
                                state.insert_idx(field, result, self.map());
                    if let Some(variant_index) = variant_index
                        && let Some(discr_idx) = self.map().apply(target_idx, TrackElem::Discriminant)
                        // We are assigning the discriminant as part of an aggregate.
                        // This discriminant can only alias a variant field's value if the operand
                        // had an invalid value for that type.
                        // Using invalid values is UB, so we are allowed to perform the assignment
                        // without extra flooding.
                        let enum_ty = target.ty(self.local_decls, self.tcx).ty;
                        if let Some(discr_val) = self.eval_discriminant(enum_ty, variant_index) {
                            state.insert_value_idx(discr_idx, FlatSet::Elem(discr_val), &self.map);
            Rvalue::CheckedBinaryOp(op, box (left, right)) => {
                // Flood everything now, so we can use `insert_value_idx` directly later.
                state.flood(target.as_ref(), self.map());

                let target = self.map().find(target.as_ref());

                // A checked op produces a `(value, overflow)` pair: fields 0 and 1.
                let value_target = target
                    .and_then(|target| self.map().apply(target, TrackElem::Field(0_u32.into())));
                let overflow_target = target
                    .and_then(|target| self.map().apply(target, TrackElem::Field(1_u32.into())));

                if value_target.is_some() || overflow_target.is_some() {
                    let (val, overflow) = self.binary_op(state, *op, left, right);

                    if let Some(value_target) = value_target {
                        // We have flooded `target` earlier.
                        state.insert_value_idx(value_target, val, self.map());
                    if let Some(overflow_target) = overflow_target {
                        // Lift the plain `bool` overflow flag into a tracked scalar.
                        let overflow = match overflow {
                            FlatSet::Top => FlatSet::Top,
                            FlatSet::Elem(overflow) => {
                                self.wrap_scalar(Scalar::from_bool(overflow), self.tcx.types.bool)
                            FlatSet::Bottom => FlatSet::Bottom,
                        // We have flooded `target` earlier.
                        state.insert_value_idx(overflow_target, overflow, self.map());
            // Everything else falls back to the generic assignment handling.
            _ => self.super_assign(target, rvalue, state),
// NOTE(review): the opening of the `handle_rvalue` signature (receiver and the
// first parameter line) and the `match` header are missing from this extraction.
        rvalue: &Rvalue<'tcx>,
        state: &mut State<Self::Value>,
    ) -> ValueOrPlace<Self::Value> {
            // Numeric casts: fold them with the interpreter when the operand is known.
            // NOTE(review): the `Rvalue::Cast(` pattern header is missing here.
                kind @
                (CastKind::IntToInt
                | CastKind::FloatToInt
                | CastKind::FloatToFloat
                | CastKind::IntToFloat),
                // NOTE(review): the operand/type pattern bindings are missing here.
            ) => match self.eval_operand(operand, state) {
                FlatSet::Elem(op) => match kind {
                    CastKind::IntToInt | CastKind::IntToFloat => {
                        self.ecx.int_to_int_or_float(&op, *ty)
                    CastKind::FloatToInt | CastKind::FloatToFloat => {
                        self.ecx.float_to_float_or_int(&op, *ty)
                // A successful fold becomes a known value; errors degrade to `Top`.
                .map(|result| ValueOrPlace::Value(self.wrap_immediate(result, *ty)))
                .unwrap_or(ValueOrPlace::top()),
                _ => ValueOrPlace::top(),
            Rvalue::BinaryOp(op, box (left, right)) => {
                // Overflows must be ignored here.
                let (val, _overflow) = self.binary_op(state, *op, left, right);
                ValueOrPlace::Value(val)
            Rvalue::UnaryOp(op, operand) => match self.eval_operand(operand, state) {
                FlatSet::Elem(value) => self
                    // NOTE(review): an intermediate receiver line is missing from
                    // this extraction before `unary_op`.
                    .unary_op(*op, &value)
                    .map(|val| ValueOrPlace::Value(self.wrap_immty(val)))
                    .unwrap_or(ValueOrPlace::Value(FlatSet::Top)),
                FlatSet::Bottom => ValueOrPlace::Value(FlatSet::Bottom),
                FlatSet::Top => ValueOrPlace::Value(FlatSet::Top),
            Rvalue::Discriminant(place) => {
                // Read the tracked discriminant value straight from the state.
                ValueOrPlace::Value(state.get_discr(place.as_ref(), self.map()))
            _ => self.super_rvalue(rvalue, state),
// NOTE(review): the opening of the `handle_constant` signature (receiver and
// first lines) plus the return type and the start of the evaluation chain are
// missing from this extraction.
        constant: &Constant<'tcx>,
        _state: &mut State<Self::Value>,
            // Evaluate the MIR constant; a scalar result becomes a lattice element,
            // anything else degrades to `Top`.
            .eval(self.tcx, self.param_env)
            .map(|value| FlatSet::Elem(ScalarTy(value, constant.ty())))
            .unwrap_or(FlatSet::Top)
    /// Transfer function for `SwitchInt` edges: if the discriminant operand has a
    /// known constant value, mark every non-taken branch as unreachable.
    fn handle_switch_int(
        // NOTE(review): the receiver line is missing from this extraction.
        discr: &Operand<'tcx>,
        apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
        // FIXME: The dataflow framework only provides the state if we call `apply()`, which makes
        // this more inefficient than it has to be.
        let mut discr_value = None;
        let mut handled = false;
        apply_edge_effects.apply(|state, target| {
            // Compute the discriminant's value once and cache it across edges.
            let discr_value = match discr_value {
                Some(value) => value,
                // NOTE(review): the `None` arm header is missing from this extraction.
                    let value = match self.handle_operand(discr, state) {
                        ValueOrPlace::Value(value) => value,
                        ValueOrPlace::Place(place) => state.get_idx(place, self.map()),
                    // Convert a known scalar into the raw bits SwitchInt compares on.
                    let result = match value {
                        FlatSet::Top => FlatSet::Top,
                        FlatSet::Elem(ScalarTy(scalar, _)) => {
                            let int = scalar.assert_int();
                            FlatSet::Elem(int.assert_bits(int.size()))
                        FlatSet::Bottom => FlatSet::Bottom,
                    discr_value = Some(result);

            let FlatSet::Elem(choice) = discr_value else {
                // Do nothing if we don't know which branch will be taken.
                // NOTE(review): the early-return lines are missing from this extraction.

            if target.value.map(|n| n == choice).unwrap_or(!handled) {
                // Branch is taken. Has no effect on state.
                // NOTE(review): lines are missing here (the `else` side follows).
                // Branch is not taken.
                state.mark_unreachable();
/// A scalar constant paired with its type — the value tracked by the analysis.
#[derive(Clone, PartialEq, Eq)]
struct ScalarTy<'tcx>(Scalar, Ty<'tcx>);
impl<'tcx> std::fmt::Debug for ScalarTy<'tcx> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // This is used for dataflow visualization, so we return something more concise.
        std::fmt::Display::fmt(&ConstantKind::Val(ConstValue::Scalar(self.0), self.1), f)
        // NOTE(review): the closing braces of `fmt` and this `impl` are missing
        // from this extraction.
impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
    /// Builds the analysis for `body`, using `body`'s param-env and a fresh
    /// interpreter running the no-op `DummyMachine`.
    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, map: Map) -> Self {
        let param_env = tcx.param_env(body.source.def_id());
        // NOTE(review): the start of the `Self { ... }` initializer (and its first
        // fields) is missing from this extraction.
            local_decls: &body.local_decls,
            ecx: InterpCx::new(tcx, DUMMY_SP, param_env, DummyMachine),
            param_env: param_env,
// NOTE(review): the opening of the `binary_op` signature (receiver and the
// operator parameter) is missing from this extraction.
// Evaluates a binary operation on two operands, returning the folded value and
// an overflow flag; unknown inputs degrade to `Top`, unreachable ones to `Bottom`.
        state: &mut State<FlatSet<ScalarTy<'tcx>>>,
        left: &Operand<'tcx>,
        right: &Operand<'tcx>,
    ) -> (FlatSet<ScalarTy<'tcx>>, FlatSet<bool>) {
        let left = self.eval_operand(left, state);
        let right = self.eval_operand(right, state);
        match (left, right) {
            (FlatSet::Elem(left), FlatSet::Elem(right)) => {
                // Both sides known: let the interpreter fold the operation.
                match self.ecx.overflowing_binary_op(op, &left, &right) {
                    Ok((val, overflow, ty)) => (self.wrap_scalar(val, ty), FlatSet::Elem(overflow)),
                    _ => (FlatSet::Top, FlatSet::Top),
            // `Bottom` (unreachable) absorbs everything.
            (FlatSet::Bottom, _) | (_, FlatSet::Bottom) => (FlatSet::Bottom, FlatSet::Bottom),
            // NOTE(review): the catch-all arm header is missing from this extraction.
                // Could attempt some algebraic simplifications here.
                (FlatSet::Top, FlatSet::Top)
// NOTE(review): the opening of the `eval_operand` signature (receiver and the
// operand parameter) is missing from this extraction.
// Resolves an operand to an interpreter-ready `ImmTy`, via the tracked state.
        state: &mut State<FlatSet<ScalarTy<'tcx>>>,
    ) -> FlatSet<ImmTy<'tcx>> {
        let value = match self.handle_operand(op, state) {
            ValueOrPlace::Value(value) => value,
            ValueOrPlace::Place(place) => state.get_idx(place, self.map),
        // NOTE(review): the closing of the `match` above and the header of the
        // following `match value` are missing from this extraction.
        FlatSet::Top => FlatSet::Top,
        FlatSet::Elem(ScalarTy(scalar, ty)) => self
            // Layout is required to build an `ImmTy`; failure degrades to `Top`.
            .layout_of(self.param_env.and(ty))
            .map(|layout| FlatSet::Elem(ImmTy::from_scalar(scalar, layout)))
            .unwrap_or(FlatSet::Top),
        FlatSet::Bottom => FlatSet::Bottom,
    /// Wraps a raw scalar and its type into a known lattice element.
    fn wrap_scalar(&self, scalar: Scalar, ty: Ty<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
        FlatSet::Elem(ScalarTy(scalar, ty))
        // NOTE(review): the closing brace is missing from this extraction.
    /// Lifts an interpreter `Immediate` into the lattice; only the plain-scalar
    /// case is visible here.
    fn wrap_immediate(&self, imm: Immediate, ty: Ty<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
        // NOTE(review): the `match imm` header is missing from this extraction.
            Immediate::Scalar(scalar) => self.wrap_scalar(scalar, ty),
            // NOTE(review): the remaining arm(s) and closing braces are missing
            // from this extraction.
    /// Convenience wrapper: lifts an `ImmTy` by splitting it into immediate + type.
    fn wrap_immty(&self, val: ImmTy<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
        self.wrap_immediate(*val, val.layout.ty)
        // NOTE(review): the closing brace is missing from this extraction.
/// Collects constant results from the fixpoint and then patches the MIR body.
struct CollectAndPatch<'tcx, 'map> {
    // NOTE(review): the leading fields (apparently `tcx` and the place `map`,
    // which methods below read) are missing from this extraction.

    /// For a given MIR location, this stores the values of the operands used by that location. In
    /// particular, this is before the effect, such that the operands of `_1 = _1 + _2` are
    /// properly captured. (This may become UB soon, but it is currently emitted even by safe code.)
    before_effect: FxHashMap<(Location, Place<'tcx>), ScalarTy<'tcx>>,

    /// Stores the assigned values for assignments where the Rvalue is constant.
    assignments: FxHashMap<Location, ScalarTy<'tcx>>,
impl<'tcx, 'map> CollectAndPatch<'tcx, 'map> {
    /// Creates an empty collector over the analysis' place map.
    fn new(tcx: TyCtxt<'tcx>, map: &'map Map) -> Self {
        Self { tcx, map, before_effect: FxHashMap::default(), assignments: FxHashMap::default() }

    /// Builds a constant MIR operand from a collected scalar + type.
    fn make_operand(&self, scalar: ScalarTy<'tcx>) -> Operand<'tcx> {
        Operand::Constant(Box::new(Constant {
            // NOTE(review): the remaining `Constant` fields (before `literal`)
            // are missing from this extraction.
            literal: ConstantKind::Val(ConstValue::Scalar(scalar.0), scalar.1),
/// First phase: walk the fixpoint results and record constant operands/assignments.
impl<'mir, 'tcx, 'map> ResultsVisitor<'mir, 'tcx> for CollectAndPatch<'tcx, 'map> {
    type FlowState = State<FlatSet<ScalarTy<'tcx>>>;

    fn visit_statement_before_primary_effect(
        // NOTE(review): the receiver parameter line is missing from this extraction.
        state: &Self::FlowState,
        statement: &'mir Statement<'tcx>,
        // NOTE(review): the `location` parameter and the body opening are missing
        // from this extraction.
        match &statement.kind {
            StatementKind::Assign(box (_, rvalue)) => {
                // Record the operand values *before* the assignment takes effect.
                OperandCollector { state, visitor: self }.visit_rvalue(rvalue, location);

    fn visit_statement_after_primary_effect(
        // NOTE(review): receiver and `location` parameter lines are missing here too.
        state: &Self::FlowState,
        statement: &'mir Statement<'tcx>,
        match statement.kind {
            StatementKind::Assign(box (_, Rvalue::Use(Operand::Constant(_)))) => {
                // Don't overwrite the assignment if it already uses a constant (to keep the span).
            StatementKind::Assign(box (place, _)) => match state.get(place.as_ref(), self.map) {
                FlatSet::Elem(value) => {
                    // A known post-state means the whole Rvalue can be replaced.
                    self.assignments.insert(location, value);
                // This assignment is either unreachable, or an uninitialized value is assigned.

    fn visit_terminator_before_primary_effect(
        // NOTE(review): receiver and `location` parameter lines are missing here too.
        state: &Self::FlowState,
        terminator: &'mir Terminator<'tcx>,
        OperandCollector { state, visitor: self }.visit_terminator(terminator, location);
/// Second phase: rewrite the body, substituting the collected constants.
impl<'tcx, 'map> MutVisitor<'tcx> for CollectAndPatch<'tcx, 'map> {
    fn tcx<'a>(&'a self) -> TyCtxt<'tcx> {
        // NOTE(review): the body of `tcx` is missing from this extraction.

    fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
        if let Some(value) = self.assignments.get(&location) {
            match &mut statement.kind {
                StatementKind::Assign(box (_, rvalue)) => {
                    // Replace the whole Rvalue with the collected constant.
                    *rvalue = Rvalue::Use(self.make_operand(value.clone()));
                // Only assignments were recorded, so anything else is a bug.
                _ => bug!("found assignment info for non-assign statement"),
        self.super_statement(statement, location);

    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
        // NOTE(review): the `match operand` header is missing from this extraction.
            Operand::Copy(place) | Operand::Move(place) => {
                // Substitute a use of a place whose pre-effect value was constant.
                if let Some(value) = self.before_effect.get(&(location, *place)) {
                    *operand = self.make_operand(value.clone());
/// Helper visitor that records constant operand values into the
/// `CollectAndPatch::before_effect` map while walking rvalues/terminators.
struct OperandCollector<'tcx, 'map, 'a> {
    // Analysis state at the visited location (before the primary effect).
    state: &'a State<FlatSet<ScalarTy<'tcx>>>,
    visitor: &'a mut CollectAndPatch<'tcx, 'map>,
impl<'tcx, 'map, 'a> Visitor<'tcx> for OperandCollector<'tcx, 'map, 'a> {
    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
        // NOTE(review): the `match operand` header is missing from this extraction.
            Operand::Copy(place) | Operand::Move(place) => {
                match self.state.get(place.as_ref(), self.visitor.map) {
                    // NOTE(review): the `Top` arm is missing from this extraction.
                    FlatSet::Elem(value) => {
                        // Remember the constant so the patch phase can substitute it.
                        self.visitor.before_effect.insert((location, *place), value);
                    FlatSet::Bottom => (),

    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
        // NOTE(review): the `match rvalue` header is missing from this extraction.
            Rvalue::Discriminant(place) => {
                // Discriminant reads are tracked separately from plain place reads.
                match self.state.get_discr(place.as_ref(), self.visitor.map) {
                    // NOTE(review): the `Top` arm is missing from this extraction.
                    FlatSet::Elem(value) => {
                        self.visitor.before_effect.insert((location, *place), value);
                    FlatSet::Bottom => (),
            _ => self.super_rvalue(rvalue, location),
542 impl<'mir
, 'tcx
> rustc_const_eval
::interpret
::Machine
<'mir
, 'tcx
> for DummyMachine
{
543 rustc_const_eval
::interpret
::compile_time_machine
!(<'mir
, 'tcx
>);
545 const PANIC_ON_ALLOC_FAIL
: bool
= true;
547 fn enforce_alignment(_ecx
: &InterpCx
<'mir
, 'tcx
, Self>) -> CheckAlignment
{
551 fn enforce_validity(_ecx
: &InterpCx
<'mir
, 'tcx
, Self>, _layout
: TyAndLayout
<'tcx
>) -> bool
{
554 fn alignment_check_failed(
555 _ecx
: &InterpCx
<'mir
, 'tcx
, Self>,
558 _check
: CheckAlignment
,
559 ) -> interpret
::InterpResult
<'tcx
, ()> {
563 fn find_mir_or_eval_fn(
564 _ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>,
565 _instance
: ty
::Instance
<'tcx
>,
566 _abi
: rustc_target
::spec
::abi
::Abi
,
567 _args
: &[rustc_const_eval
::interpret
::OpTy
<'tcx
, Self::Provenance
>],
568 _destination
: &rustc_const_eval
::interpret
::PlaceTy
<'tcx
, Self::Provenance
>,
569 _target
: Option
<BasicBlock
>,
570 _unwind
: UnwindAction
,
571 ) -> interpret
::InterpResult
<'tcx
, Option
<(&'mir Body
<'tcx
>, ty
::Instance
<'tcx
>)>> {
576 _ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>,
577 _instance
: ty
::Instance
<'tcx
>,
578 _args
: &[rustc_const_eval
::interpret
::OpTy
<'tcx
, Self::Provenance
>],
579 _destination
: &rustc_const_eval
::interpret
::PlaceTy
<'tcx
, Self::Provenance
>,
580 _target
: Option
<BasicBlock
>,
581 _unwind
: UnwindAction
,
582 ) -> interpret
::InterpResult
<'tcx
> {
587 _ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>,
588 _msg
: &rustc_middle
::mir
::AssertMessage
<'tcx
>,
589 _unwind
: UnwindAction
,
590 ) -> interpret
::InterpResult
<'tcx
> {
595 _ecx
: &InterpCx
<'mir
, 'tcx
, Self>,
597 _left
: &rustc_const_eval
::interpret
::ImmTy
<'tcx
, Self::Provenance
>,
598 _right
: &rustc_const_eval
::interpret
::ImmTy
<'tcx
, Self::Provenance
>,
599 ) -> interpret
::InterpResult
<'tcx
, (interpret
::Scalar
<Self::Provenance
>, bool
, Ty
<'tcx
>)> {
600 throw_unsup
!(Unsupported("".into()))
604 _ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>,
605 _ptr
: interpret
::Pointer
<Self::Provenance
>,
606 ) -> interpret
::InterpResult
<'tcx
> {
611 _ecx
: &mut InterpCx
<'mir
, 'tcx
, Self>,
612 _frame
: rustc_const_eval
::interpret
::Frame
<'mir
, 'tcx
, Self::Provenance
>,
613 ) -> interpret
::InterpResult
<
615 rustc_const_eval
::interpret
::Frame
<'mir
, 'tcx
, Self::Provenance
, Self::FrameExtra
>,
621 _ecx
: &'a InterpCx
<'mir
, 'tcx
, Self>,
622 ) -> &'a
[rustc_const_eval
::interpret
::Frame
<'mir
, 'tcx
, Self::Provenance
, Self::FrameExtra
>]
628 _ecx
: &'a
mut InterpCx
<'mir
, 'tcx
, Self>,
630 rustc_const_eval
::interpret
::Frame
<'mir
, 'tcx
, Self::Provenance
, Self::FrameExtra
>,