]> git.proxmox.com Git - rustc.git/blob - compiler/rustc_mir_transform/src/dataflow_const_prop.rs
New upstream version 1.70.0+dfsg1
[rustc.git] / compiler / rustc_mir_transform / src / dataflow_const_prop.rs
1 //! A constant propagation optimization pass based on dataflow analysis.
2 //!
3 //! Currently, this pass only propagates scalar values.
4
5 use rustc_const_eval::const_eval::CheckAlignment;
6 use rustc_const_eval::interpret::{ConstValue, ImmTy, Immediate, InterpCx, Scalar};
7 use rustc_data_structures::fx::FxHashMap;
8 use rustc_hir::def::DefKind;
9 use rustc_middle::mir::visit::{MutVisitor, Visitor};
10 use rustc_middle::mir::*;
11 use rustc_middle::ty::layout::TyAndLayout;
12 use rustc_middle::ty::{self, Ty, TyCtxt};
13 use rustc_mir_dataflow::value_analysis::{Map, State, TrackElem, ValueAnalysis, ValueOrPlace};
14 use rustc_mir_dataflow::{lattice::FlatSet, Analysis, ResultsVisitor, SwitchIntEdgeEffects};
15 use rustc_span::DUMMY_SP;
16 use rustc_target::abi::{Align, FieldIdx, VariantIdx};
17
18 use crate::MirPass;
19
// These constants are somewhat random guesses and have not been optimized.
// If `tcx.sess.mir_opt_level() >= 4`, we ignore the limits (this can become very expensive).
/// Bodies with more basic blocks than this are skipped entirely (below opt level 4).
const BLOCK_LIMIT: usize = 100;
/// Upper bound on the number of places tracked by the analysis (below opt level 4).
const PLACE_LIMIT: usize = 100;
24
25 pub struct DataflowConstProp;
26
27 impl<'tcx> MirPass<'tcx> for DataflowConstProp {
28 fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
29 sess.mir_opt_level() >= 3
30 }
31
32 #[instrument(skip_all level = "debug")]
33 fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
34 debug!(def_id = ?body.source.def_id());
35 if tcx.sess.mir_opt_level() < 4 && body.basic_blocks.len() > BLOCK_LIMIT {
36 debug!("aborted dataflow const prop due too many basic blocks");
37 return;
38 }
39
40 // We want to have a somewhat linear runtime w.r.t. the number of statements/terminators.
41 // Let's call this number `n`. Dataflow analysis has `O(h*n)` transfer function
42 // applications, where `h` is the height of the lattice. Because the height of our lattice
43 // is linear w.r.t. the number of tracked places, this is `O(tracked_places * n)`. However,
44 // because every transfer function application could traverse the whole map, this becomes
45 // `O(num_nodes * tracked_places * n)` in terms of time complexity. Since the number of
46 // map nodes is strongly correlated to the number of tracked places, this becomes more or
47 // less `O(n)` if we place a constant limit on the number of tracked places.
48 let place_limit = if tcx.sess.mir_opt_level() < 4 { Some(PLACE_LIMIT) } else { None };
49
50 // Decide which places to track during the analysis.
51 let map = Map::from_filter(tcx, body, Ty::is_scalar, place_limit);
52
53 // Perform the actual dataflow analysis.
54 let analysis = ConstAnalysis::new(tcx, body, map);
55 let results = debug_span!("analyze")
56 .in_scope(|| analysis.wrap().into_engine(tcx, body).iterate_to_fixpoint());
57
58 // Collect results and patch the body afterwards.
59 let mut visitor = CollectAndPatch::new(tcx, &results.analysis.0.map);
60 debug_span!("collect").in_scope(|| results.visit_reachable_with(body, &mut visitor));
61 debug_span!("patch").in_scope(|| visitor.visit_body(body));
62 }
63 }
64
/// The value analysis: tracks, for each tracked place, whether it holds a known
/// scalar constant (`FlatSet::Elem`), an unknown value (`Top`), or is
/// unreachable/uninitialized (`Bottom`).
struct ConstAnalysis<'a, 'tcx> {
    // Decides which places are tracked and maps them to indices.
    map: Map,
    tcx: TyCtxt<'tcx>,
    local_decls: &'a LocalDecls<'tcx>,
    // Interpreter used to fold casts and unary/binary operations on known scalars.
    ecx: InterpCx<'tcx, 'tcx, DummyMachine>,
    param_env: ty::ParamEnv<'tcx>,
}
72
73 impl<'tcx> ConstAnalysis<'_, 'tcx> {
74 fn eval_discriminant(
75 &self,
76 enum_ty: Ty<'tcx>,
77 variant_index: VariantIdx,
78 ) -> Option<ScalarTy<'tcx>> {
79 if !enum_ty.is_enum() {
80 return None;
81 }
82 let discr = enum_ty.discriminant_for_variant(self.tcx, variant_index)?;
83 let discr_layout = self.tcx.layout_of(self.param_env.and(discr.ty)).ok()?;
84 let discr_value = Scalar::try_from_uint(discr.val, discr_layout.size)?;
85 Some(ScalarTy(discr_value, discr.ty))
86 }
87 }
88
impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
    type Value = FlatSet<ScalarTy<'tcx>>;

    const NAME: &'static str = "ConstAnalysis";

    fn map(&self) -> &Map {
        &self.map
    }

    /// Transfer function for statements. `SetDiscriminant` is handled here so the
    /// tracked discriminant can be updated to the evaluated value; everything else
    /// falls through to the default handling.
    fn handle_statement(&self, statement: &Statement<'tcx>, state: &mut State<Self::Value>) {
        match statement.kind {
            StatementKind::SetDiscriminant { box ref place, variant_index } => {
                // Invalidate what we knew about the discriminant first, then
                // re-assign the evaluated value if we track it and can compute it.
                state.flood_discr(place.as_ref(), &self.map);
                if self.map.find_discr(place.as_ref()).is_some() {
                    let enum_ty = place.ty(self.local_decls, self.tcx).ty;
                    if let Some(discr) = self.eval_discriminant(enum_ty, variant_index) {
                        state.assign_discr(
                            place.as_ref(),
                            ValueOrPlace::Value(FlatSet::Elem(discr)),
                            &self.map,
                        );
                    }
                }
            }
            _ => self.super_statement(statement, state),
        }
    }

    /// Transfer function for assignments. Aggregates are decomposed field-by-field,
    /// and checked binary ops split into their value and overflow components.
    fn handle_assign(
        &self,
        target: Place<'tcx>,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<Self::Value>,
    ) {
        match rvalue {
            Rvalue::Aggregate(kind, operands) => {
                // If we assign `target = Enum::Variant#0(operand)`,
                // we must make sure that all `target as Variant#i` are `Top`.
                state.flood(target.as_ref(), self.map());

                if let Some(target_idx) = self.map().find(target.as_ref()) {
                    // Determine where the fields land (directly in `target`, or inside
                    // a variant projection) and which variant's discriminant to set.
                    let (variant_target, variant_index) = match **kind {
                        AggregateKind::Tuple | AggregateKind::Closure(..) => {
                            (Some(target_idx), None)
                        }
                        AggregateKind::Adt(def_id, variant_index, ..) => {
                            match self.tcx.def_kind(def_id) {
                                DefKind::Struct => (Some(target_idx), None),
                                DefKind::Enum => (
                                    self.map.apply(target_idx, TrackElem::Variant(variant_index)),
                                    Some(variant_index),
                                ),
                                _ => (None, None),
                            }
                        }
                        _ => (None, None),
                    };
                    // Propagate each tracked field of the aggregate individually.
                    if let Some(variant_target_idx) = variant_target {
                        for (field_index, operand) in operands.iter().enumerate() {
                            if let Some(field) = self.map().apply(
                                variant_target_idx,
                                TrackElem::Field(FieldIdx::from_usize(field_index)),
                            ) {
                                let result = self.handle_operand(operand, state);
                                state.insert_idx(field, result, self.map());
                            }
                        }
                    }
                    if let Some(variant_index) = variant_index
                        && let Some(discr_idx) = self.map().apply(target_idx, TrackElem::Discriminant)
                    {
                        // We are assigning the discriminant as part of an aggregate.
                        // This discriminant can only alias a variant field's value if the operand
                        // had an invalid value for that type.
                        // Using invalid values is UB, so we are allowed to perform the assignment
                        // without extra flooding.
                        let enum_ty = target.ty(self.local_decls, self.tcx).ty;
                        if let Some(discr_val) = self.eval_discriminant(enum_ty, variant_index) {
                            state.insert_value_idx(discr_idx, FlatSet::Elem(discr_val), &self.map);
                        }
                    }
                }
            }
            Rvalue::CheckedBinaryOp(op, box (left, right)) => {
                // Flood everything now, so we can use `insert_value_idx` directly later.
                state.flood(target.as_ref(), self.map());

                let target = self.map().find(target.as_ref());

                // Field 0 is the operation result, field 1 the overflow flag.
                let value_target = target
                    .and_then(|target| self.map().apply(target, TrackElem::Field(0_u32.into())));
                let overflow_target = target
                    .and_then(|target| self.map().apply(target, TrackElem::Field(1_u32.into())));

                if value_target.is_some() || overflow_target.is_some() {
                    let (val, overflow) = self.binary_op(state, *op, left, right);

                    if let Some(value_target) = value_target {
                        // We have flooded `target` earlier.
                        state.insert_value_idx(value_target, val, self.map());
                    }
                    if let Some(overflow_target) = overflow_target {
                        // Convert the `bool` overflow flag into a tracked scalar value.
                        let overflow = match overflow {
                            FlatSet::Top => FlatSet::Top,
                            FlatSet::Elem(overflow) => {
                                self.wrap_scalar(Scalar::from_bool(overflow), self.tcx.types.bool)
                            }
                            FlatSet::Bottom => FlatSet::Bottom,
                        };
                        // We have flooded `target` earlier.
                        state.insert_value_idx(overflow_target, overflow, self.map());
                    }
                }
            }
            _ => self.super_assign(target, rvalue, state),
        }
    }

    /// Transfer function for rvalues: folds numeric casts, binary/unary ops, and
    /// discriminant reads to constants when the operands are known.
    fn handle_rvalue(
        &self,
        rvalue: &Rvalue<'tcx>,
        state: &mut State<Self::Value>,
    ) -> ValueOrPlace<Self::Value> {
        match rvalue {
            Rvalue::Cast(
                kind @ (CastKind::IntToInt
                | CastKind::FloatToInt
                | CastKind::FloatToFloat
                | CastKind::IntToFloat),
                operand,
                ty,
            ) => match self.eval_operand(operand, state) {
                FlatSet::Elem(op) => match kind {
                    CastKind::IntToInt | CastKind::IntToFloat => {
                        self.ecx.int_to_int_or_float(&op, *ty)
                    }
                    CastKind::FloatToInt | CastKind::FloatToFloat => {
                        self.ecx.float_to_float_or_int(&op, *ty)
                    }
                    // Excluded by the `kind @ (...)` pattern above.
                    _ => unreachable!(),
                }
                .map(|result| ValueOrPlace::Value(self.wrap_immediate(result, *ty)))
                .unwrap_or(ValueOrPlace::top()),
                _ => ValueOrPlace::top(),
            },
            Rvalue::BinaryOp(op, box (left, right)) => {
                // Overflows must be ignored here.
                let (val, _overflow) = self.binary_op(state, *op, left, right);
                ValueOrPlace::Value(val)
            }
            Rvalue::UnaryOp(op, operand) => match self.eval_operand(operand, state) {
                FlatSet::Elem(value) => self
                    .ecx
                    .unary_op(*op, &value)
                    .map(|val| ValueOrPlace::Value(self.wrap_immty(val)))
                    // Interpretation failure degrades to "unknown", not an error.
                    .unwrap_or(ValueOrPlace::Value(FlatSet::Top)),
                FlatSet::Bottom => ValueOrPlace::Value(FlatSet::Bottom),
                FlatSet::Top => ValueOrPlace::Value(FlatSet::Top),
            },
            Rvalue::Discriminant(place) => {
                ValueOrPlace::Value(state.get_discr(place.as_ref(), self.map()))
            }
            _ => self.super_rvalue(rvalue, state),
        }
    }

    /// Evaluates a MIR constant to a scalar lattice value; non-scalar or
    /// unevaluatable constants become `Top`.
    fn handle_constant(
        &self,
        constant: &Constant<'tcx>,
        _state: &mut State<Self::Value>,
    ) -> Self::Value {
        constant
            .literal
            .eval(self.tcx, self.param_env)
            .try_to_scalar()
            .map(|value| FlatSet::Elem(ScalarTy(value, constant.ty())))
            .unwrap_or(FlatSet::Top)
    }

    /// Edge effects for `SwitchInt`: when the discriminant is a known constant,
    /// all edges except the taken one are marked unreachable.
    fn handle_switch_int(
        &self,
        discr: &Operand<'tcx>,
        apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
    ) {
        // FIXME: The dataflow framework only provides the state if we call `apply()`, which makes
        // this more inefficient than it has to be.
        let mut discr_value = None;
        // Tracks whether some explicit target matched `choice`, so the otherwise
        // edge (the `None` target) can be handled correctly.
        let mut handled = false;
        apply_edge_effects.apply(|state, target| {
            // Evaluate the discriminant once (on the first edge) and cache it.
            let discr_value = match discr_value {
                Some(value) => value,
                None => {
                    let value = match self.handle_operand(discr, state) {
                        ValueOrPlace::Value(value) => value,
                        ValueOrPlace::Place(place) => state.get_idx(place, self.map()),
                    };
                    let result = match value {
                        FlatSet::Top => FlatSet::Top,
                        FlatSet::Elem(ScalarTy(scalar, _)) => {
                            let int = scalar.assert_int();
                            FlatSet::Elem(int.assert_bits(int.size()))
                        }
                        FlatSet::Bottom => FlatSet::Bottom,
                    };
                    discr_value = Some(result);
                    result
                }
            };

            let FlatSet::Elem(choice) = discr_value else {
                // Do nothing if we don't know which branch will be taken.
                return
            };

            if target.value.map(|n| n == choice).unwrap_or(!handled) {
                // Branch is taken. Has no effect on state.
                handled = true;
            } else {
                // Branch is not taken.
                state.mark_unreachable();
            }
        })
    }
}
313
/// A scalar value together with its type. The type is kept so the value can be
/// turned back into a MIR constant when patching the body.
#[derive(Clone, PartialEq, Eq)]
struct ScalarTy<'tcx>(Scalar, Ty<'tcx>);
316
impl<'tcx> std::fmt::Debug for ScalarTy<'tcx> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // This is used for dataflow visualization, so we return something more concise:
        // delegate to the `Display` of the equivalent MIR constant.
        std::fmt::Display::fmt(&ConstantKind::Val(ConstValue::Scalar(self.0), self.1), f)
    }
}
323
324 impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
325 pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, map: Map) -> Self {
326 let param_env = tcx.param_env(body.source.def_id());
327 Self {
328 map,
329 tcx,
330 local_decls: &body.local_decls,
331 ecx: InterpCx::new(tcx, DUMMY_SP, param_env, DummyMachine),
332 param_env: param_env,
333 }
334 }
335
336 fn binary_op(
337 &self,
338 state: &mut State<FlatSet<ScalarTy<'tcx>>>,
339 op: BinOp,
340 left: &Operand<'tcx>,
341 right: &Operand<'tcx>,
342 ) -> (FlatSet<ScalarTy<'tcx>>, FlatSet<bool>) {
343 let left = self.eval_operand(left, state);
344 let right = self.eval_operand(right, state);
345 match (left, right) {
346 (FlatSet::Elem(left), FlatSet::Elem(right)) => {
347 match self.ecx.overflowing_binary_op(op, &left, &right) {
348 Ok((val, overflow, ty)) => (self.wrap_scalar(val, ty), FlatSet::Elem(overflow)),
349 _ => (FlatSet::Top, FlatSet::Top),
350 }
351 }
352 (FlatSet::Bottom, _) | (_, FlatSet::Bottom) => (FlatSet::Bottom, FlatSet::Bottom),
353 (_, _) => {
354 // Could attempt some algebraic simplifcations here.
355 (FlatSet::Top, FlatSet::Top)
356 }
357 }
358 }
359
360 fn eval_operand(
361 &self,
362 op: &Operand<'tcx>,
363 state: &mut State<FlatSet<ScalarTy<'tcx>>>,
364 ) -> FlatSet<ImmTy<'tcx>> {
365 let value = match self.handle_operand(op, state) {
366 ValueOrPlace::Value(value) => value,
367 ValueOrPlace::Place(place) => state.get_idx(place, &self.map),
368 };
369 match value {
370 FlatSet::Top => FlatSet::Top,
371 FlatSet::Elem(ScalarTy(scalar, ty)) => self
372 .tcx
373 .layout_of(self.param_env.and(ty))
374 .map(|layout| FlatSet::Elem(ImmTy::from_scalar(scalar, layout)))
375 .unwrap_or(FlatSet::Top),
376 FlatSet::Bottom => FlatSet::Bottom,
377 }
378 }
379
380 fn wrap_scalar(&self, scalar: Scalar, ty: Ty<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
381 FlatSet::Elem(ScalarTy(scalar, ty))
382 }
383
384 fn wrap_immediate(&self, imm: Immediate, ty: Ty<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
385 match imm {
386 Immediate::Scalar(scalar) => self.wrap_scalar(scalar, ty),
387 _ => FlatSet::Top,
388 }
389 }
390
391 fn wrap_immty(&self, val: ImmTy<'tcx>) -> FlatSet<ScalarTy<'tcx>> {
392 self.wrap_immediate(*val, val.layout.ty)
393 }
394 }
395
/// Collects the constants discovered by the analysis while walking the results,
/// then (as a `MutVisitor`) patches them into the body.
struct CollectAndPatch<'tcx, 'map> {
    tcx: TyCtxt<'tcx>,
    map: &'map Map,

    /// For a given MIR location, this stores the values of the operands used by that location. In
    /// particular, this is before the effect, such that the operands of `_1 = _1 + _2` are
    /// properly captured. (This may become UB soon, but it is currently emitted even by safe code.)
    before_effect: FxHashMap<(Location, Place<'tcx>), ScalarTy<'tcx>>,

    /// Stores the assigned values for assignments where the Rvalue is constant.
    assignments: FxHashMap<Location, ScalarTy<'tcx>>,
}
408
409 impl<'tcx, 'map> CollectAndPatch<'tcx, 'map> {
410 fn new(tcx: TyCtxt<'tcx>, map: &'map Map) -> Self {
411 Self { tcx, map, before_effect: FxHashMap::default(), assignments: FxHashMap::default() }
412 }
413
414 fn make_operand(&self, scalar: ScalarTy<'tcx>) -> Operand<'tcx> {
415 Operand::Constant(Box::new(Constant {
416 span: DUMMY_SP,
417 user_ty: None,
418 literal: ConstantKind::Val(ConstValue::Scalar(scalar.0), scalar.1),
419 }))
420 }
421 }
422
impl<'mir, 'tcx, 'map> ResultsVisitor<'mir, 'tcx> for CollectAndPatch<'tcx, 'map> {
    type FlowState = State<FlatSet<ScalarTy<'tcx>>>;

    /// Records known operand values of an assignment's rvalue *before* the
    /// statement takes effect (see `before_effect` on the struct).
    fn visit_statement_before_primary_effect(
        &mut self,
        state: &Self::FlowState,
        statement: &'mir Statement<'tcx>,
        location: Location,
    ) {
        match &statement.kind {
            StatementKind::Assign(box (_, rvalue)) => {
                OperandCollector { state, visitor: self }.visit_rvalue(rvalue, location);
            }
            _ => (),
        }
    }

    /// After an assignment took effect, remembers the assigned value if the
    /// analysis determined it to be a constant.
    fn visit_statement_after_primary_effect(
        &mut self,
        state: &Self::FlowState,
        statement: &'mir Statement<'tcx>,
        location: Location,
    ) {
        // NOTE: arm order matters — the constant-rvalue case must be checked first.
        match statement.kind {
            StatementKind::Assign(box (_, Rvalue::Use(Operand::Constant(_)))) => {
                // Don't overwrite the assignment if it already uses a constant (to keep the span).
            }
            StatementKind::Assign(box (place, _)) => match state.get(place.as_ref(), self.map) {
                FlatSet::Top => (),
                FlatSet::Elem(value) => {
                    self.assignments.insert(location, value);
                }
                FlatSet::Bottom => {
                    // This assignment is either unreachable, or an uninitialized value is assigned.
                }
            },
            _ => (),
        }
    }

    /// Records known operand values used by a terminator before its effect.
    fn visit_terminator_before_primary_effect(
        &mut self,
        state: &Self::FlowState,
        terminator: &'mir Terminator<'tcx>,
        location: Location,
    ) {
        OperandCollector { state, visitor: self }.visit_terminator(terminator, location);
    }
}
472
473 impl<'tcx, 'map> MutVisitor<'tcx> for CollectAndPatch<'tcx, 'map> {
474 fn tcx<'a>(&'a self) -> TyCtxt<'tcx> {
475 self.tcx
476 }
477
478 fn visit_statement(&mut self, statement: &mut Statement<'tcx>, location: Location) {
479 if let Some(value) = self.assignments.get(&location) {
480 match &mut statement.kind {
481 StatementKind::Assign(box (_, rvalue)) => {
482 *rvalue = Rvalue::Use(self.make_operand(value.clone()));
483 }
484 _ => bug!("found assignment info for non-assign statement"),
485 }
486 } else {
487 self.super_statement(statement, location);
488 }
489 }
490
491 fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
492 match operand {
493 Operand::Copy(place) | Operand::Move(place) => {
494 if let Some(value) = self.before_effect.get(&(location, *place)) {
495 *operand = self.make_operand(value.clone());
496 }
497 }
498 _ => (),
499 }
500 }
501 }
502
/// MIR visitor that records constant values of operands (as of the pre-effect
/// `state`) into `visitor.before_effect`.
struct OperandCollector<'tcx, 'map, 'a> {
    state: &'a State<FlatSet<ScalarTy<'tcx>>>,
    visitor: &'a mut CollectAndPatch<'tcx, 'map>,
}
507
508 impl<'tcx, 'map, 'a> Visitor<'tcx> for OperandCollector<'tcx, 'map, 'a> {
509 fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
510 match operand {
511 Operand::Copy(place) | Operand::Move(place) => {
512 match self.state.get(place.as_ref(), self.visitor.map) {
513 FlatSet::Top => (),
514 FlatSet::Elem(value) => {
515 self.visitor.before_effect.insert((location, *place), value);
516 }
517 FlatSet::Bottom => (),
518 }
519 }
520 _ => (),
521 }
522 }
523
524 fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
525 match rvalue {
526 Rvalue::Discriminant(place) => {
527 match self.state.get_discr(place.as_ref(), self.visitor.map) {
528 FlatSet::Top => (),
529 FlatSet::Elem(value) => {
530 self.visitor.before_effect.insert((location, *place), value);
531 }
532 FlatSet::Bottom => (),
533 }
534 }
535 _ => self.super_rvalue(rvalue, location),
536 }
537 }
538 }
539
540 struct DummyMachine;
541
impl<'mir, 'tcx> rustc_const_eval::interpret::Machine<'mir, 'tcx> for DummyMachine {
    rustc_const_eval::interpret::compile_time_machine!(<'mir, 'tcx>);
    // `!` (never): this machine performs no allocations of its own.
    type MemoryKind = !;
    const PANIC_ON_ALLOC_FAIL: bool = true;

    // The hooks below are `unimplemented!()` because the analysis only uses the
    // interpreter for casts and unary/binary operations on scalars — it never
    // accesses memory, calls functions, or pushes frames.

    fn enforce_alignment(_ecx: &InterpCx<'mir, 'tcx, Self>) -> CheckAlignment {
        unimplemented!()
    }

    fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>, _layout: TyAndLayout<'tcx>) -> bool {
        unimplemented!()
    }
    fn alignment_check_failed(
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        _has: Align,
        _required: Align,
        _check: CheckAlignment,
    ) -> interpret::InterpResult<'tcx, ()> {
        unimplemented!()
    }

    fn find_mir_or_eval_fn(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _instance: ty::Instance<'tcx>,
        _abi: rustc_target::spec::abi::Abi,
        _args: &[rustc_const_eval::interpret::OpTy<'tcx, Self::Provenance>],
        _destination: &rustc_const_eval::interpret::PlaceTy<'tcx, Self::Provenance>,
        _target: Option<BasicBlock>,
        _unwind: UnwindAction,
    ) -> interpret::InterpResult<'tcx, Option<(&'mir Body<'tcx>, ty::Instance<'tcx>)>> {
        unimplemented!()
    }

    fn call_intrinsic(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _instance: ty::Instance<'tcx>,
        _args: &[rustc_const_eval::interpret::OpTy<'tcx, Self::Provenance>],
        _destination: &rustc_const_eval::interpret::PlaceTy<'tcx, Self::Provenance>,
        _target: Option<BasicBlock>,
        _unwind: UnwindAction,
    ) -> interpret::InterpResult<'tcx> {
        unimplemented!()
    }

    fn assert_panic(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _msg: &rustc_middle::mir::AssertMessage<'tcx>,
        _unwind: UnwindAction,
    ) -> interpret::InterpResult<'tcx> {
        unimplemented!()
    }

    // This one *is* reachable from `overflowing_binary_op` on pointer-typed
    // operands; it fails gracefully so the caller degrades the result to `Top`.
    fn binary_ptr_op(
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        _bin_op: BinOp,
        _left: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>,
        _right: &rustc_const_eval::interpret::ImmTy<'tcx, Self::Provenance>,
    ) -> interpret::InterpResult<'tcx, (interpret::Scalar<Self::Provenance>, bool, Ty<'tcx>)> {
        throw_unsup!(Unsupported("".into()))
    }

    fn expose_ptr(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _ptr: interpret::Pointer<Self::Provenance>,
    ) -> interpret::InterpResult<'tcx> {
        unimplemented!()
    }

    fn init_frame_extra(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _frame: rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance>,
    ) -> interpret::InterpResult<
        'tcx,
        rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>,
    > {
        unimplemented!()
    }

    fn stack<'a>(
        _ecx: &'a InterpCx<'mir, 'tcx, Self>,
    ) -> &'a [rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>]
    {
        unimplemented!()
    }

    fn stack_mut<'a>(
        _ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
    ) -> &'a mut Vec<
        rustc_const_eval::interpret::Frame<'mir, 'tcx, Self::Provenance, Self::FrameExtra>,
    > {
        unimplemented!()
    }
}