]> git.proxmox.com Git - rustc.git/blame - compiler/rustc_mir_transform/src/dataflow_const_prop.rs
New upstream version 1.68.2+dfsg1
[rustc.git] / compiler / rustc_mir_transform / src / dataflow_const_prop.rs
CommitLineData
487cf647
FG
1//! A constant propagation optimization pass based on dataflow analysis.
2//!
3//! Currently, this pass only propagates scalar values.
4
f25598a0 5use rustc_const_eval::const_eval::CheckAlignment;
487cf647
FG
6use rustc_const_eval::interpret::{ConstValue, ImmTy, Immediate, InterpCx, Scalar};
7use rustc_data_structures::fx::FxHashMap;
8use rustc_middle::mir::visit::{MutVisitor, Visitor};
9use rustc_middle::mir::*;
10use rustc_middle::ty::{self, Ty, TyCtxt};
11use rustc_mir_dataflow::value_analysis::{Map, State, TrackElem, ValueAnalysis, ValueOrPlace};
12use rustc_mir_dataflow::{lattice::FlatSet, Analysis, ResultsVisitor, SwitchIntEdgeEffects};
13use rustc_span::DUMMY_SP;
f25598a0 14use rustc_target::abi::Align;
487cf647
FG
15
16use crate::MirPass;
17
// These constants are somewhat random guesses and have not been optimized.
// If `tcx.sess.mir_opt_level() >= 4`, we ignore the limits (this can become very expensive).
// Upper bound on the number of basic blocks for which the pass runs at all.
const BLOCK_LIMIT: usize = 100;
// Upper bound on the number of tracked places (see the complexity note in `run_pass`).
const PLACE_LIMIT: usize = 100;

/// MIR optimization pass that propagates scalar constants using the
/// dataflow value-analysis framework. Currently only scalar values are
/// propagated (see the module docs).
pub struct DataflowConstProp;
24
25impl<'tcx> MirPass<'tcx> for DataflowConstProp {
26 fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
27 sess.mir_opt_level() >= 3
28 }
29
30 #[instrument(skip_all level = "debug")]
31 fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
32 if tcx.sess.mir_opt_level() < 4 && body.basic_blocks.len() > BLOCK_LIMIT {
33 debug!("aborted dataflow const prop due too many basic blocks");
34 return;
35 }
36
37 // Decide which places to track during the analysis.
38 let map = Map::from_filter(tcx, body, Ty::is_scalar);
39
40 // We want to have a somewhat linear runtime w.r.t. the number of statements/terminators.
41 // Let's call this number `n`. Dataflow analysis has `O(h*n)` transfer function
42 // applications, where `h` is the height of the lattice. Because the height of our lattice
43 // is linear w.r.t. the number of tracked places, this is `O(tracked_places * n)`. However,
44 // because every transfer function application could traverse the whole map, this becomes
45 // `O(num_nodes * tracked_places * n)` in terms of time complexity. Since the number of
46 // map nodes is strongly correlated to the number of tracked places, this becomes more or
47 // less `O(n)` if we place a constant limit on the number of tracked places.
48 if tcx.sess.mir_opt_level() < 4 && map.tracked_places() > PLACE_LIMIT {
49 debug!("aborted dataflow const prop due to too many tracked places");
50 return;
51 }
52
53 // Perform the actual dataflow analysis.
54 let analysis = ConstAnalysis::new(tcx, body, map);
55 let results = debug_span!("analyze")
56 .in_scope(|| analysis.wrap().into_engine(tcx, body).iterate_to_fixpoint());
57
58 // Collect results and patch the body afterwards.
59 let mut visitor = CollectAndPatch::new(tcx, &results.analysis.0.map);
60 debug_span!("collect").in_scope(|| results.visit_reachable_with(body, &mut visitor));
61 debug_span!("patch").in_scope(|| visitor.visit_body(body));
62 }
63}
64
/// The value analysis: for every tracked place, the lattice value is either a
/// single known scalar (`FlatSet::Elem`), unknown (`Top`), or
/// unreachable/uninitialized (`Bottom`) — see the `ValueAnalysis` impl below.
struct ConstAnalysis<'tcx> {
    // Maps tracked `Place`s to the indices used by the dataflow `State`.
    map: Map,
    tcx: TyCtxt<'tcx>,
    // Interpreter used to constant-fold primitive operations (casts, binary/unary ops).
    ecx: InterpCx<'tcx, 'tcx, DummyMachine>,
    param_env: ty::ParamEnv<'tcx>,
}
71
impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> {
    type Value = FlatSet<ScalarTy<'tcx>>;

    const NAME: &'static str = "ConstAnalysis";

    fn map(&self) -> &Map {
        &self.map
    }

    /// Transfer function for assignments. Only `CheckedBinaryOp` gets special
    /// handling here (its result is a `(value, overflow_flag)` pair, so the two
    /// fields are tracked separately); everything else defers to `super_assign`.
    fn handle_assign(
        &self,
        target: Place<'tcx>,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<Self::Value>,
    ) {
        match rvalue {
            Rvalue::CheckedBinaryOp(op, box (left, right)) => {
                let target = self.map().find(target.as_ref());
                if let Some(target) = target {
                    // We should not track any projections other than
                    // what is overwritten below, but just in case...
                    state.flood_idx(target, self.map());
                }

                // Field 0 of the pair is the arithmetic result, field 1 the overflow flag.
                let value_target = target
                    .and_then(|target| self.map().apply(target, TrackElem::Field(0_u32.into())));
                let overflow_target = target
                    .and_then(|target| self.map().apply(target, TrackElem::Field(1_u32.into())));

                if value_target.is_some() || overflow_target.is_some() {
                    let (val, overflow) = self.binary_op(state, *op, left, right);

                    if let Some(value_target) = value_target {
                        state.assign_idx(value_target, ValueOrPlace::Value(val), self.map());
                    }
                    if let Some(overflow_target) = overflow_target {
                        let overflow = match overflow {
                            FlatSet::Top => FlatSet::Top,
                            FlatSet::Elem(overflow) => {
                                if overflow {
                                    // Overflow cannot be reliably propagated. See: https://github.com/rust-lang/rust/pull/101168#issuecomment-1288091446
                                    FlatSet::Top
                                } else {
                                    // No overflow: the flag is a known `false`.
                                    self.wrap_scalar(Scalar::from_bool(false), self.tcx.types.bool)
                                }
                            }
                            FlatSet::Bottom => FlatSet::Bottom,
                        };
                        state.assign_idx(
                            overflow_target,
                            ValueOrPlace::Value(overflow),
                            self.map(),
                        );
                    }
                }
            }
            _ => self.super_assign(target, rvalue, state),
        }
    }

    /// Transfer function for rvalues: constant-folds numeric casts and
    /// binary/unary operations via the interpreter, returning `Top` whenever
    /// the operands are not known or evaluation fails.
    fn handle_rvalue(
        &self,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<Self::Value>,
    ) -> ValueOrPlace<Self::Value> {
        match rvalue {
            Rvalue::Cast(
                kind @ (CastKind::IntToInt
                | CastKind::FloatToInt
                | CastKind::FloatToFloat
                | CastKind::IntToFloat),
                operand,
                ty,
            ) => match self.eval_operand(operand, state) {
                FlatSet::Elem(op) => match kind {
                    CastKind::IntToInt | CastKind::IntToFloat => {
                        self.ecx.int_to_int_or_float(&op, *ty)
                    }
                    CastKind::FloatToInt | CastKind::FloatToFloat => {
                        self.ecx.float_to_float_or_int(&op, *ty)
                    }
                    _ => unreachable!(),
                }
                .map(|result| ValueOrPlace::Value(self.wrap_immediate(result, *ty)))
                .unwrap_or(ValueOrPlace::top()),
                _ => ValueOrPlace::top(),
            },
            Rvalue::BinaryOp(op, box (left, right)) => {
                // Overflows must be ignored here.
                let (val, _overflow) = self.binary_op(state, *op, left, right);
                ValueOrPlace::Value(val)
            }
            Rvalue::UnaryOp(op, operand) => match self.eval_operand(operand, state) {
                FlatSet::Elem(value) => self
                    .ecx
                    .unary_op(*op, &value)
                    .map(|val| ValueOrPlace::Value(self.wrap_immty(val)))
                    .unwrap_or(ValueOrPlace::Value(FlatSet::Top)),
                FlatSet::Bottom => ValueOrPlace::Value(FlatSet::Bottom),
                FlatSet::Top => ValueOrPlace::Value(FlatSet::Top),
            },
            _ => self.super_rvalue(rvalue, state),
        }
    }

    /// Evaluates a MIR constant; non-scalar constants are mapped to `Top`
    /// since only scalar values are tracked.
    fn handle_constant(
        &self,
        constant: &Constant<'tcx>,
        _state: &mut State<Self::Value>,
    ) -> Self::Value {
        constant
            .literal
            .eval(self.tcx, self.param_env)
            .try_to_scalar()
            .map(|value| FlatSet::Elem(ScalarTy(value, constant.ty())))
            .unwrap_or(FlatSet::Top)
    }

    /// Edge effect for `SwitchInt`: if the discriminant is a known constant,
    /// every edge that cannot be taken is marked unreachable.
    fn handle_switch_int(
        &self,
        discr: &Operand<'tcx>,
        apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
    ) {
        // FIXME: The dataflow framework only provides the state if we call `apply()`, which makes
        // this more inefficient than it has to be.
        // `discr_value` caches the evaluated discriminant across edges; `handled`
        // records whether some explicit value-edge matched (so the `otherwise`
        // edge, whose `target.value` is `None`, is live only if none did).
        let mut discr_value = None;
        let mut handled = false;
        apply_edge_effects.apply(|state, target| {
            let discr_value = match discr_value {
                Some(value) => value,
                None => {
                    // Evaluate the discriminant once, on the first edge.
                    let value = match self.handle_operand(discr, state) {
                        ValueOrPlace::Value(value) => value,
                        ValueOrPlace::Place(place) => state.get_idx(place, self.map()),
                    };
                    let result = match value {
                        FlatSet::Top => FlatSet::Top,
                        FlatSet::Elem(ScalarTy(scalar, _)) => {
                            let int = scalar.assert_int();
                            FlatSet::Elem(int.assert_bits(int.size()))
                        }
                        FlatSet::Bottom => FlatSet::Bottom,
                    };
                    discr_value = Some(result);
                    result
                }
            };

            let FlatSet::Elem(choice) = discr_value else {
                // Do nothing if we don't know which branch will be taken.
                return
            };

            if target.value.map(|n| n == choice).unwrap_or(!handled) {
                // Branch is taken. Has no effect on state.
                handled = true;
            } else {
                // Branch is not taken.
                state.mark_unreachable();
            }
        })
    }
}
235
/// A scalar value paired with its type — the lattice element used by `ConstAnalysis`.
#[derive(Clone, PartialEq, Eq)]
struct ScalarTy<'tcx>(Scalar, Ty<'tcx>);
238
239impl<'tcx> std::fmt::Debug for ScalarTy<'tcx> {
240 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
241 // This is used for dataflow visualization, so we return something more concise.
242 std::fmt::Display::fmt(&ConstantKind::Val(ConstValue::Scalar(self.0), self.1), f)
243 }
244}
245
246impl<'tcx> ConstAnalysis<'tcx> {
247 pub fn new(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, map: Map) -> Self {
248 let param_env = tcx.param_env(body.source.def_id());
249 Self {
250 map,
251 tcx,
252 ecx: InterpCx::new(tcx, DUMMY_SP, param_env, DummyMachine),
253 param_env: param_env,
254 }
255 }
256
257 fn binary_op(
258 &self,
259 state: &mut State<FlatSet<ScalarTy<'tcx>>>,
260 op: BinOp,
261 left: &Operand<'tcx>,
262 right: &Operand<'tcx>,
263 ) -> (FlatSet<ScalarTy<'tcx>>, FlatSet<bool>) {
264 let left = self.eval_operand(left, state);
265 let right = self.eval_operand(right, state);
266 match (left, right) {
267 (FlatSet::Elem(left), FlatSet::Elem(right)) => {
268 match self.ecx.overflowing_binary_op(op, &left, &right) {
269 Ok((val, overflow, ty)) => (self.wrap_scalar(val, ty), FlatSet::Elem(overflow)),
270 _ => (FlatSet::Top, FlatSet::Top),
271 }
272 }
273 (FlatSet::Bottom, _) | (_, FlatSet::Bottom) => (FlatSet::Bottom, FlatSet::Bottom),
274 (_, _) => {
275 // Could attempt some algebraic simplifcations here.
276 (FlatSet::Top, FlatSet::Top)
277 }
278 }
279 }
280
281 fn eval_operand(
282 &self,
283 op: &Operand<'tcx>,
284 state: &mut State<FlatSet<ScalarTy<'tcx>>>,
285 ) -> FlatSet<ImmTy<'tcx>> {
286 let value = match self.handle_operand(op, state) {
287 ValueOrPlace::Value(value) => value,
288 ValueOrPlace::Place(place) => state.get_idx(place, &self.map),
289 };
290 match value {
291 FlatSet::Top => FlatSet::Top,
292 FlatSet::Elem(ScalarTy(scalar, ty)) => self
293 .tcx
294 .layout_of(self.param_env.and(ty))
295 .map(|layout| FlatSet::Elem(ImmTy::from_scalar(scalar, layout)))
296 .unwrap_or(FlatSet::Top),
297 FlatSet::Bottom => FlatSet::Bottom,
298 }
299 }
300
301 fn wrap_scalar(&self, scalar: Scalar, ty: Ty<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
302 FlatSet::Elem(ScalarTy(scalar, ty))
303 }
304
305 fn wrap_immediate(&self, imm: Immediate, ty: Ty<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
306 match imm {
307 Immediate::Scalar(scalar) => self.wrap_scalar(scalar, ty),
308 _ => FlatSet::Top,
309 }
310 }
311
312 fn wrap_immty(&self, val: ImmTy<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
313 self.wrap_immediate(*val, val.layout.ty)
314 }
315}
316
/// Two-phase visitor: first (as a `ResultsVisitor`) it records which operands
/// and assignments have known constant values; then (as a `MutVisitor`) it
/// rewrites the body accordingly.
struct CollectAndPatch<'tcx, 'map> {
    tcx: TyCtxt<'tcx>,
    map: &'map Map,

    /// For a given MIR location, this stores the values of the operands used by that location. In
    /// particular, this is before the effect, such that the operands of `_1 = _1 + _2` are
    /// properly captured. (This may become UB soon, but it is currently emitted even by safe code.)
    before_effect: FxHashMap<(Location, Place<'tcx>), ScalarTy<'tcx>>,

    /// Stores the assigned values for assignments where the Rvalue is constant.
    assignments: FxHashMap<Location, ScalarTy<'tcx>>,
}
329
330impl<'tcx, 'map> CollectAndPatch<'tcx, 'map> {
331 fn new(tcx: TyCtxt<'tcx>, map: &'map Map) -> Self {
332 Self { tcx, map, before_effect: FxHashMap::default(), assignments: FxHashMap::default() }
333 }
334
335 fn make_operand(&self, scalar: ScalarTy<'tcx>) -> Operand<'tcx> {
336 Operand::Constant(Box::new(Constant {
337 span: DUMMY_SP,
338 user_ty: None,
339 literal: ConstantKind::Val(ConstValue::Scalar(scalar.0), scalar.1),
340 }))
341 }
342}
343
344impl<'mir, 'tcx, 'map> ResultsVisitor<'mir, 'tcx> for CollectAndPatch<'tcx, 'map> {
345 type FlowState = State<FlatSet<ScalarTy<'tcx>>>;
346
347 fn visit_statement_before_primary_effect(
348 &mut self,
349 state: &Self::FlowState,
350 statement: &'mir Statement<'tcx>,
351 location: Location,
352 ) {
353 match &statement.kind {
354 StatementKind::Assign(box (_, rvalue)) => {
355 OperandCollector { state, visitor: self }.visit_rvalue(rvalue, location);
356 }
357 _ => (),
358 }
359 }
360
361 fn visit_statement_after_primary_effect(
362 &mut self,
363 state: &Self::FlowState,
364 statement: &'mir Statement<'tcx>,
365 location: Location,
366 ) {
367 match statement.kind {
368 StatementKind::Assign(box (_, Rvalue::Use(Operand::Constant(_)))) => {
369 // Don't overwrite the assignment if it already uses a constant (to keep the span).
370 }
371 StatementKind::Assign(box (place, _)) => match state.get(place.as_ref(), self.map) {
372 FlatSet::Top => (),
373 FlatSet::Elem(value) => {
374 self.assignments.insert(location, value);
375 }
376 FlatSet::Bottom => {
377 // This assignment is either unreachable, or an uninitialized value is assigned.
378 }
379 },
380 _ => (),
381 }
382 }
383
384 fn visit_terminator_before_primary_effect(
385 &mut self,
386 state: &Self::FlowState,
387 terminator: &'mir Terminator<'tcx>,
388 location: Location,
389 ) {
390 OperandCollector { state, visitor: self }.visit_terminator(terminator, location);
391 }
392}
393
394impl<'tcx, 'map> MutVisitor<'tcx> for CollectAndPatch<'tcx, 'map> {
395 fn tcx<'a>(&'a self) -> TyCtxt<'tcx> {
396 self.tcx
397 }
398
399 fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
400 if let Some(value) = self.assignments.get(&location) {
401 match &mut statement.kind {
402 StatementKind::Assign(box (_, rvalue)) => {
403 *rvalue = Rvalue::Use(self.make_operand(value.clone()));
404 }
405 _ => bug!("found assignment info for non-assign statement"),
406 }
407 } else {
408 self.super_statement(statement, location);
409 }
410 }
411
412 fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
413 match operand {
414 Operand::Copy(place) | Operand::Move(place) => {
415 if let Some(value) = self.before_effect.get(&(location, *place)) {
416 *operand = self.make_operand(value.clone());
417 }
418 }
419 _ => (),
420 }
421 }
422}
423
/// Read-only visitor that records, into `visitor.before_effect`, the known
/// constant value of every place-operand at a given dataflow state.
struct OperandCollector<'tcx, 'map, 'a> {
    state: &'a State<FlatSet<ScalarTy<'tcx>>>,
    visitor: &'a mut CollectAndPatch<'tcx, 'map>,
}
428
429impl<'tcx, 'map, 'a> Visitor<'tcx> for OperandCollector<'tcx, 'map, 'a> {
430 fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
431 match operand {
432 Operand::Copy(place) | Operand::Move(place) => {
433 match self.state.get(place.as_ref(), self.visitor.map) {
434 FlatSet::Top => (),
435 FlatSet::Elem(value) => {
436 self.visitor.before_effect.insert((location, *place), value);
437 }
438 FlatSet::Bottom => (),
439 }
440 }
441 _ => (),
442 }
443 }
444}
445
/// Interpreter machine used only to constant-fold primitive scalar operations
/// (casts, unary/binary ops) in `ConstAnalysis`.
struct DummyMachine;

impl<'mir, 'tcx> rustc_const_eval::interpret::Machine<'mir, 'tcx> for DummyMachine {
    rustc_const_eval::interpret::compile_time_machine!(<'mir, 'tcx>);
    // `!` (never): no memory is ever allocated through this machine.
    type MemoryKind = !;
    const PANIC_ON_ALLOC_FAIL: bool = true;

    // All hooks below abort via `unimplemented!()` if reached — the analysis
    // is expected to exercise only the pure arithmetic entry points of the
    // interpreter. NOTE(review): this expectation is not enforced here; it
    // relies on the callers in `ConstAnalysis` — confirm when adding new uses.
    fn enforce_alignment(_ecx: &InterpCx<'mir, 'tcx, Self>) -> CheckAlignment {
        unimplemented!()
    }

    fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
        unimplemented!()
    }

    fn alignment_check_failed(
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        _has: Align,
        _required: Align,
        _check: CheckAlignment,
    ) -> interpret::InterpResult<'tcx, ()> {
        unimplemented!()
    }

    fn find_mir_or_eval_fn(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _instance: ty::Instance<'tcx>,
        _abi: rustc_target::spec::abi::Abi,
        _args: &[rustc_const_eval::interpret::OpTy<'tcx, Self::Provenance>],
        _destination: &rustc_const_eval::interpret::PlaceTy<'tcx, Self::Provenance>,
        _target: Option<BasicBlock>,
        _unwind: rustc_const_eval::interpret::StackPopUnwind,
    ) -> interpret::InterpResult<'tcx, Option<(&'mir Body<'tcx>, ty::Instance<'tcx>)>> {
        unimplemented!()
    }

    fn call_intrinsic(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _instance: ty::Instance<'tcx>,
        _args: &[rustc_const_eval::interpret::OpTy<'tcx, Self::Provenance>],
        _destination: &rustc_const_eval::interpret::PlaceTy<'tcx, Self::Provenance>,
        _target: Option<BasicBlock>,
        _unwind: rustc_const_eval::interpret::StackPopUnwind,
    ) -> interpret::InterpResult<'tcx> {
        unimplemented!()
    }

    fn assert_panic(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _msg: &rustc_middle::mir::AssertMessage<'tcx>,
        _unwind: Option<BasicBlock>,
    ) -> interpret::InterpResult<'tcx> {
        unimplemented!()
    }

    // Pointer comparisons are reported as unsupported rather than aborting;
    // `ConstAnalysis::binary_op` maps the resulting `Err` to `Top`.
    fn binary_ptr_op(
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        _bin_op: BinOp,
        _left: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>,
        _right: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>,
    ) -> interpret::InterpResult<'tcx, (interpret::Scalar<Self::Provenance>, bool, Ty<'tcx>)> {
        throw_unsup!(Unsupported("".into()))
    }

    fn expose_ptr(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _ptr: interpret::Pointer<Self::Provenance>,
    ) -> interpret::InterpResult<'tcx> {
        unimplemented!()
    }

    fn init_frame_extra(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _frame: rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance>,
    ) -> interpret::InterpResult<
        'tcx,
        rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>,
    > {
        unimplemented!()
    }

    fn stack<'a>(
        _ecx: &'a InterpCx<'mir, 'tcx, Self>,
    ) -> &'a [rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>]
    {
        unimplemented!()
    }

    fn stack_mut<'a>(
        _ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
    ) -> &'a mut Vec<
        rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>,
    > {
        unimplemented!()
    }
}