//! This module contains the `InterpCx` methods for executing a single step of the interpreter.
//!
//! The main entry point is the `step` method.

use either::Either;

use rustc_middle::mir;
use rustc_middle::mir::interpret::{InterpResult, Scalar};
use rustc_middle::ty::layout::LayoutOf;

use super::{ImmTy, InterpCx, Machine};

/// Classify whether an operator is "left-homogeneous", i.e., the LHS has the
/// same type as the result.
#[inline]
fn binop_left_homogeneous(op: mir::BinOp) -> bool {
    use rustc_middle::mir::BinOp::*;
    match op {
        Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Offset | Shl | Shr => true,
        Eq | Ne | Lt | Le | Gt | Ge => false,
    }
}
/// Classify whether an operator is "right-homogeneous", i.e., the RHS has the
/// same type as the LHS.
#[inline]
fn binop_right_homogeneous(op: mir::BinOp) -> bool {
    use rustc_middle::mir::BinOp::*;
    match op {
        Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Eq | Ne | Lt | Le | Gt | Ge => true,
        Offset | Shl | Shr => false,
    }
}
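
// A surface-Rust illustration of the two classifications above (a sketch, not
// interpreter code):
//
//     let y: u64 = 1;
//     let amount: u32 = 3;
//     let _shift: u64 = y << amount; // `Shl`: left-homogeneous, not right-homogeneous
//     let _cmp: bool = y == 2;       // `Eq`: right-homogeneous, not left-homogeneous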

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    /// Returns `true` as long as there are more things to do.
    ///
    /// This is used by [priroda](https://github.com/oli-obk/priroda).
    ///
    /// This is marked `#[inline(always)]` to work around adversarial codegen when `opt-level = 3`.
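    ///
    /// # Example
    ///
    /// A minimal driver loop, as a sketch only: building the `InterpCx` (here the
    /// assumed variable `ecx`) is machine-specific and omitted.
    ///
    /// ```ignore (illustrative; requires a fully initialized InterpCx)
    /// // Keep stepping until the topmost frame has been popped.
    /// while ecx.step()? {}
    /// ```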
    #[inline(always)]
    pub fn step(&mut self) -> InterpResult<'tcx, bool> {
        if self.stack().is_empty() {
            return Ok(false);
        }

        let Either::Left(loc) = self.frame().loc else {
            // We are unwinding and this fn has no cleanup code.
            // Just go on unwinding.
            trace!("unwinding: skipping frame");
            self.pop_stack_frame(/* unwinding */ true)?;
            return Ok(true);
        };
        let basic_block = &self.body().basic_blocks[loc.block];

        if let Some(stmt) = basic_block.statements.get(loc.statement_index) {
            let old_frames = self.frame_idx();
            self.statement(stmt)?;
            // Make sure we are not updating `statement_index` of the wrong frame.
            assert_eq!(old_frames, self.frame_idx());
            // Advance the program counter.
            self.frame_mut().loc.as_mut().left().unwrap().statement_index += 1;
            return Ok(true);
        }

        M::before_terminator(self)?;

        let terminator = basic_block.terminator();
        self.terminator(terminator)?;
        Ok(true)
    }

    /// Runs the interpretation logic for the given `mir::Statement` at the current frame and
    /// statement counter.
    ///
    /// This does NOT move the statement counter forward; the caller has to do that!
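    ///
    /// For example, `step` above pairs each call with an explicit increment
    /// (sketch of the caller's side; `ecx` is an assumed `InterpCx`):
    ///
    /// ```ignore (illustrative; requires a fully initialized InterpCx)
    /// ecx.statement(stmt)?;
    /// ecx.frame_mut().loc.as_mut().left().unwrap().statement_index += 1;
    /// ```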
    pub fn statement(&mut self, stmt: &mir::Statement<'tcx>) -> InterpResult<'tcx> {
        info!("{:?}", stmt);

        use rustc_middle::mir::StatementKind::*;

        match &stmt.kind {
            Assign(box (place, rvalue)) => self.eval_rvalue_into_place(rvalue, *place)?,

            SetDiscriminant { place, variant_index } => {
                let dest = self.eval_place(**place)?;
                self.write_discriminant(*variant_index, &dest)?;
            }

            Deinit(place) => {
                let dest = self.eval_place(**place)?;
                self.write_uninit(&dest)?;
            }

            // Mark locals as alive.
            StorageLive(local) => {
                self.storage_live(*local)?;
            }

            // Mark locals as dead.
            StorageDead(local) => {
                self.storage_dead(*local)?;
            }

            // No dynamic semantics attached to `FakeRead`; the MIR
            // interpreter is solely intended for borrowck'ed code.
            FakeRead(..) => {}

            // Stacked Borrows.
            Retag(kind, place) => {
                let dest = self.eval_place(**place)?;
                M::retag_place_contents(self, *kind, &dest)?;
            }

            Intrinsic(box intrinsic) => self.emulate_nondiverging_intrinsic(intrinsic)?,

            // Statements we do not track.
            PlaceMention(..) | AscribeUserType(..) => {}

            // Currently, Miri discards Coverage statements. Coverage statements are only injected
            // via an optional compile-time MIR pass and have no side effects. Since Coverage
            // statements don't exist at the source level, it is safe for Miri to ignore them, even
            // for undefined behavior (UB) checks.
            //
            // A coverage counter inside a const expression (for example, a counter injected in a
            // const function) is discarded when the const is evaluated at compile time. Whether
            // this should change, and/or how to implement a const eval counter, is the subject of
            // the following issue:
            //
            // FIXME(#73156): Handle source code coverage in const eval
            Coverage(..) => {}

            ConstEvalCounter => {
                M::increment_const_eval_counter(self)?;
            }

            // Defined to do nothing. These are added by optimization passes, to avoid changing the
            // size of MIR constantly.
            Nop => {}
        }

        Ok(())
    }

    /// Evaluate an assignment statement.
    ///
    /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
    /// type writes its results directly into the memory specified by the place.
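    ///
    /// Schematically: for a MIR assignment like `_2 = Add(_1, const 1_i32)`, `dest`
    /// is the memory backing `_2`, and the `BinaryOp` arm below writes the sum
    /// straight into it; nothing is returned to the caller.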
    pub fn eval_rvalue_into_place(
        &mut self,
        rvalue: &mir::Rvalue<'tcx>,
        place: mir::Place<'tcx>,
    ) -> InterpResult<'tcx> {
        let dest = self.eval_place(place)?;
        // FIXME: ensure some kind of non-aliasing between LHS and RHS?
        // Also see https://github.com/rust-lang/rust/issues/68364.

        use rustc_middle::mir::Rvalue::*;
        match *rvalue {
            ThreadLocalRef(did) => {
                let ptr = M::thread_local_static_base_pointer(self, did)?;
                self.write_pointer(ptr, &dest)?;
            }

            Use(ref operand) => {
                // Avoid recomputing the layout.
                let op = self.eval_operand(operand, Some(dest.layout))?;
                self.copy_op(&op, &dest, /*allow_transmute*/ false)?;
            }

            CopyForDeref(place) => {
                let op = self.eval_place_to_op(place, Some(dest.layout))?;
                self.copy_op(&op, &dest, /*allow_transmute*/ false)?;
            }

            BinaryOp(bin_op, box (ref left, ref right)) => {
                let layout = binop_left_homogeneous(bin_op).then_some(dest.layout);
                let left = self.read_immediate(&self.eval_operand(left, layout)?)?;
                let layout = binop_right_homogeneous(bin_op).then_some(left.layout);
                let right = self.read_immediate(&self.eval_operand(right, layout)?)?;
                self.binop_ignore_overflow(bin_op, &left, &right, &dest)?;
            }

            CheckedBinaryOp(bin_op, box (ref left, ref right)) => {
                // Due to the extra boolean in the result, we can never reuse the `dest.layout`.
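                // E.g., a checked `i32` addition yields an `(i32, bool)` pair, whose
                // layout matches neither operand.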
                let left = self.read_immediate(&self.eval_operand(left, None)?)?;
                let layout = binop_right_homogeneous(bin_op).then_some(left.layout);
                let right = self.read_immediate(&self.eval_operand(right, layout)?)?;
                self.binop_with_overflow(bin_op, &left, &right, &dest)?;
            }

            UnaryOp(un_op, ref operand) => {
                // The operand always has the same type as the result.
                let val = self.read_immediate(&self.eval_operand(operand, Some(dest.layout))?)?;
                let val = self.unary_op(un_op, &val)?;
                assert_eq!(val.layout, dest.layout, "layout mismatch for result of {:?}", un_op);
                self.write_immediate(*val, &dest)?;
            }

            Aggregate(box ref kind, ref operands) => {
                self.write_aggregate(kind, operands, &dest)?;
            }

            Repeat(ref operand, _) => {
                let src = self.eval_operand(operand, None)?;
                assert!(src.layout.is_sized());
                let dest = self.force_allocation(&dest)?;
                let length = dest.len(self)?;

                if length == 0 {
                    // Nothing to copy... but let's still make sure that `dest` as a place is valid.
                    self.get_place_alloc_mut(&dest)?;
                } else {
                    // Write the src to the first element.
                    let first = self.mplace_field(&dest, 0)?;
                    self.copy_op(&src, &first.into(), /*allow_transmute*/ false)?;

                    // This is performance-sensitive code for big static/const arrays! So we
                    // avoid writing each operand individually and instead just make many copies
                    // of the first element.
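                    // E.g., `[0_u8; 4096]` performs one 1-byte write for the first
                    // element and then a single `mem_copy_repeatedly` duplicating it
                    // into the remaining 4095 slots, instead of 4096 separate stores.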
                    let elem_size = first.layout.size;
                    let first_ptr = first.ptr;
                    let rest_ptr = first_ptr.offset(elem_size, self)?;
                    // For the alignment of `rest_ptr`, we crucially do *not* use `first.align` as
                    // that place might be more aligned than its type mandates (a `u8` array could
                    // be 4-aligned if it sits at the right spot in a struct). Instead we use
                    // `first.layout.align`, i.e., the alignment given by the type.
                    self.mem_copy_repeatedly(
                        first_ptr,
                        first.align,
                        rest_ptr,
                        first.layout.align.abi,
                        elem_size,
                        length - 1,
                        /*nonoverlapping:*/ true,
                    )?;
                }
            }

            Len(place) => {
                let src = self.eval_place(place)?;
                let op = self.place_to_op(&src)?;
                let len = op.len(self)?;
                self.write_scalar(Scalar::from_target_usize(len, self), &dest)?;
            }

            Ref(_, borrow_kind, place) => {
                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                let val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
                // A fresh reference was created, make sure it gets retagged.
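                // (Two-phase borrows typically come from method-call autoref, e.g. the
                // implicit `&mut v` in `v.push(v.len())`, which must tolerate the shared
                // read of `v` between the borrow's creation and the call itself.)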
                let val = M::retag_ptr_value(
                    self,
                    if borrow_kind.allows_two_phase_borrow() {
                        mir::RetagKind::TwoPhase
                    } else {
                        mir::RetagKind::Default
                    },
                    &val,
                )?;
                self.write_immediate(*val, &dest)?;
            }

            AddressOf(_, place) => {
                // Figure out whether this is an addr_of of an already raw place.
                let place_base_raw = if place.has_deref() {
                    let ty = self.frame().body.local_decls[place.local].ty;
                    ty.is_unsafe_ptr()
                } else {
                    // Not a deref, and thus not raw.
                    false
                };
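
                // E.g., `ptr::addr_of!((*p).field)` with `p: *const T` starts from an
                // already-raw place, so `place_base_raw` is true and no retag happens
                // below.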
                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                let mut val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
                if !place_base_raw {
                    // If this was not already raw, it needs retagging.
                    val = M::retag_ptr_value(self, mir::RetagKind::Raw, &val)?;
                }
                self.write_immediate(*val, &dest)?;
            }

            NullaryOp(null_op, ty) => {
                let ty = self.subst_from_current_frame_and_normalize_erasing_regions(ty)?;
                let layout = self.layout_of(ty)?;
                if layout.is_unsized() {
                    // FIXME: This should be a span_bug (#80742)
                    self.tcx.sess.delay_span_bug(
                        self.frame().current_span(),
                        &format!("Nullary MIR operator called for unsized type {}", ty),
                    );
                    throw_inval!(SizeOfUnsizedType(ty));
                }
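                // E.g., once the `LowerIntrinsics` MIR pass has lowered a
                // `size_of::<u64>()` call to `NullOp::SizeOf`, this arm turns it into
                // the constant 8.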
                let val = match null_op {
                    mir::NullOp::SizeOf => layout.size.bytes(),
                    mir::NullOp::AlignOf => layout.align.abi.bytes(),
                };
                self.write_scalar(Scalar::from_target_usize(val, self), &dest)?;
            }

            ShallowInitBox(ref operand, _) => {
                let src = self.eval_operand(operand, None)?;
                let v = self.read_immediate(&src)?;
                self.write_immediate(*v, &dest)?;
            }

            Cast(cast_kind, ref operand, cast_ty) => {
                let src = self.eval_operand(operand, None)?;
                let cast_ty =
                    self.subst_from_current_frame_and_normalize_erasing_regions(cast_ty)?;
                self.cast(&src, cast_kind, cast_ty, &dest)?;
            }

            Discriminant(place) => {
                let op = self.eval_place_to_op(place, None)?;
                let discr_val = self.read_discriminant(&op)?.0;
                self.write_scalar(discr_val, &dest)?;
            }
        }

        trace!("{:?}", self.dump_place(*dest));

        Ok(())
    }

    /// Evaluate the given terminator. Will also adjust the stack frame and statement position accordingly.
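    ///
    /// E.g., a `Goto { target }` terminator moves the frame's `loc` to the first
    /// statement of `target`, while a `Call` pushes a new stack frame.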
    fn terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> InterpResult<'tcx> {
        info!("{:?}", terminator.kind);

        self.eval_terminator(terminator)?;
        if !self.stack().is_empty() {
            if let Either::Left(loc) = self.frame().loc {
                info!("// executing {:?}", loc.block);
            }
        }
        Ok(())
    }
}