]>
Commit | Line | Data |
---|---|---|
416331ca | 1 | //! This module contains the `InterpCx` methods for executing a single step of the interpreter. |
ff7c6d11 XL |
2 | //! |
3 | //! The main entry point is the `step` method. | |
4 | ||
ba9703b0 XL |
5 | use rustc_middle::mir; |
6 | use rustc_middle::mir::interpret::{InterpResult, Scalar}; | |
c295e0f8 | 7 | use rustc_middle::ty::layout::LayoutOf; |
ff7c6d11 | 8 | |
416331ca | 9 | use super::{InterpCx, Machine}; |
ff7c6d11 | 10 | |
0731742a | 11 | /// Classify whether an operator is "left-homogeneous", i.e., the LHS has the |
b7449926 XL |
12 | /// same type as the result. |
13 | #[inline] | |
14 | fn binop_left_homogeneous(op: mir::BinOp) -> bool { | |
ba9703b0 | 15 | use rustc_middle::mir::BinOp::*; |
b7449926 | 16 | match op { |
dfeec247 XL |
17 | Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Offset | Shl | Shr => true, |
18 | Eq | Ne | Lt | Le | Gt | Ge => false, | |
b7449926 XL |
19 | } |
20 | } | |
0731742a | 21 | /// Classify whether an operator is "right-homogeneous", i.e., the RHS has the |
b7449926 XL |
22 | /// same type as the LHS. |
23 | #[inline] | |
24 | fn binop_right_homogeneous(op: mir::BinOp) -> bool { | |
ba9703b0 | 25 | use rustc_middle::mir::BinOp::*; |
b7449926 | 26 | match op { |
dfeec247 XL |
27 | Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Eq | Ne | Lt | Le | Gt | Ge => true, |
28 | Offset | Shl | Shr => false, | |
b7449926 XL |
29 | } |
30 | } | |
31 | ||
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    /// Drives the interpreter: repeatedly executes single steps until `step`
    /// reports that there is nothing left to do (empty stack) or an error occurs.
    pub fn run(&mut self) -> InterpResult<'tcx> {
        while self.step()? {}
        Ok(())
    }

    /// Executes a single statement or terminator of the current frame.
    /// Returns `true` as long as there are more things to do.
    ///
    /// This is used by [priroda](https://github.com/oli-obk/priroda)
    ///
    /// This is marked `#[inline(always)]` to work around adversarial codegen when `opt-level = 3`
    #[inline(always)]
    pub fn step(&mut self) -> InterpResult<'tcx, bool> {
        // No frames left: execution is complete.
        if self.stack().is_empty() {
            return Ok(false);
        }

        // `frame().loc` is `Err` when this frame has no cleanup blocks to run
        // during unwinding (see below); otherwise it points at the next
        // statement/terminator to execute.
        let loc = match self.frame().loc {
            Ok(loc) => loc,
            Err(_) => {
                // We are unwinding and this fn has no cleanup code.
                // Just go on unwinding.
                trace!("unwinding: skipping frame");
                self.pop_stack_frame(/* unwinding */ true)?;
                // The stack may still be non-empty; report "more to do".
                return Ok(true);
            }
        };
        let basic_block = &self.body().basic_blocks()[loc.block];

        // Remember the stack depth so we can assert that executing a statement
        // never pushes or pops frames (only terminators may do that).
        let old_frames = self.frame_idx();

        if let Some(stmt) = basic_block.statements.get(loc.statement_index) {
            assert_eq!(old_frames, self.frame_idx());
            self.statement(stmt)?;
            return Ok(true);
        }

        // `statement_index` ran past the statements of this block, so the
        // terminator is next. Give the machine a chance to hook in first.
        M::before_terminator(self)?;

        let terminator = basic_block.terminator();
        assert_eq!(old_frames, self.frame_idx());
        self.terminator(terminator)?;
        Ok(true)
    }

    /// Runs the interpretation logic for the given `mir::Statement` at the current frame and
    /// statement counter. This also moves the statement counter forward.
    pub fn statement(&mut self, stmt: &mir::Statement<'tcx>) -> InterpResult<'tcx> {
        info!("{:?}", stmt);

        use rustc_middle::mir::StatementKind::*;

        // Some statements (e.g., box) push new stack frames.
        // We have to record the stack frame number *before* executing the statement.
        let frame_idx = self.frame_idx();

        match &stmt.kind {
            Assign(box (place, rvalue)) => self.eval_rvalue_into_place(rvalue, *place)?,

            SetDiscriminant { place, variant_index } => {
                let dest = self.eval_place(**place)?;
                self.write_discriminant(*variant_index, &dest)?;
            }

            // Mark locals as alive
            StorageLive(local) => {
                self.storage_live(*local)?;
            }

            // Mark locals as dead
            StorageDead(local) => {
                self.storage_dead(*local)?;
            }

            // No dynamic semantics attached to `FakeRead`; MIR
            // interpreter is solely intended for borrowck'ed code.
            FakeRead(..) => {}

            // Stacked Borrows.
            Retag(kind, place) => {
                let dest = self.eval_place(**place)?;
                M::retag(self, *kind, &dest)?;
            }

            // Call CopyNonOverlapping
            CopyNonOverlapping(box rustc_middle::mir::CopyNonOverlapping { src, dst, count }) => {
                let src = self.eval_operand(src, None)?;
                let dst = self.eval_operand(dst, None)?;
                let count = self.eval_operand(count, None)?;
                self.copy_intrinsic(&src, &dst, &count, /* nonoverlapping */ true)?;
            }

            // Statements we do not track.
            AscribeUserType(..) => {}

            // Currently, Miri discards Coverage statements. Coverage statements are only injected
            // via an optional compile time MIR pass and have no side effects. Since Coverage
            // statements don't exist at the source level, it is safe for Miri to ignore them, even
            // for undefined behavior (UB) checks.
            //
            // A coverage counter inside a const expression (for example, a counter injected in a
            // const function) is discarded when the const is evaluated at compile time. Whether
            // this should change, and/or how to implement a const eval counter, is a subject of the
            // following issue:
            //
            // FIXME(#73156): Handle source code coverage in const eval
            Coverage(..) => {}

            // Defined to do nothing. These are added by optimization passes, to avoid changing the
            // size of MIR constantly.
            Nop => {}

            LlvmInlineAsm { .. } => throw_unsup_format!("inline assembly is not supported"),
        }

        // Advance to the next statement. `loc` is `Ok` here: if we were
        // unwinding past a cleanup-less frame, `step` would not have called us.
        // Note: if the statement pushed a frame, `frame_idx` still indexes the
        // frame the statement belonged to, which is the one to advance.
        self.stack_mut()[frame_idx].loc.as_mut().unwrap().statement_index += 1;
        Ok(())
    }

    /// Evaluate an assignment statement.
    ///
    /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
    /// type writes its results directly into the memory specified by the place.
    pub fn eval_rvalue_into_place(
        &mut self,
        rvalue: &mir::Rvalue<'tcx>,
        place: mir::Place<'tcx>,
    ) -> InterpResult<'tcx> {
        let dest = self.eval_place(place)?;

        use rustc_middle::mir::Rvalue::*;
        match *rvalue {
            ThreadLocalRef(did) => {
                // The machine decides what a thread-local's base address is
                // (e.g. Miri gives each thread its own allocation).
                let ptr = M::thread_local_static_base_pointer(self, did)?;
                self.write_pointer(ptr, &dest)?;
            }

            Use(ref operand) => {
                // Avoid recomputing the layout
                let op = self.eval_operand(operand, Some(dest.layout))?;
                self.copy_op(&op, &dest)?;
            }

            BinaryOp(bin_op, box (ref left, ref right)) => {
                // Reuse known layouts where the homogeneity rules permit,
                // saving layout queries for the operands.
                let layout = binop_left_homogeneous(bin_op).then_some(dest.layout);
                let left = self.read_immediate(&self.eval_operand(left, layout)?)?;
                let layout = binop_right_homogeneous(bin_op).then_some(left.layout);
                let right = self.read_immediate(&self.eval_operand(right, layout)?)?;
                self.binop_ignore_overflow(bin_op, &left, &right, &dest)?;
            }

            CheckedBinaryOp(bin_op, box (ref left, ref right)) => {
                // Due to the extra boolean in the result, we can never reuse the `dest.layout`.
                let left = self.read_immediate(&self.eval_operand(left, None)?)?;
                let layout = binop_right_homogeneous(bin_op).then_some(left.layout);
                let right = self.read_immediate(&self.eval_operand(right, layout)?)?;
                self.binop_with_overflow(bin_op, &left, &right, &dest)?;
            }

            UnaryOp(un_op, ref operand) => {
                // The operand always has the same type as the result.
                let val = self.read_immediate(&self.eval_operand(operand, Some(dest.layout))?)?;
                let val = self.unary_op(un_op, &val)?;
                assert_eq!(val.layout, dest.layout, "layout mismatch for result of {:?}", un_op);
                self.write_immediate(*val, &dest)?;
            }

            Aggregate(ref kind, ref operands) => {
                // active_field_index is for union initialization.
                let (dest, active_field_index) = match **kind {
                    mir::AggregateKind::Adt(adt_def, variant_index, _, _, active_field_index) => {
                        self.write_discriminant(variant_index, &dest)?;
                        if adt_def.is_enum() {
                            // Enums never carry an active field; write the
                            // operands into the downcast variant place.
                            assert!(active_field_index.is_none());
                            (self.place_downcast(&dest, variant_index)?, None)
                        } else {
                            // A union is initialized with exactly one operand,
                            // targeting its active field.
                            if active_field_index.is_some() {
                                assert_eq!(operands.len(), 1);
                            }
                            (dest, active_field_index)
                        }
                    }
                    _ => (dest, None),
                };

                for (i, operand) in operands.iter().enumerate() {
                    let op = self.eval_operand(operand, None)?;
                    let field_index = active_field_index.unwrap_or(i);
                    let field_dest = self.place_field(&dest, field_index)?;
                    self.copy_op(&op, &field_dest)?;
                }
            }

            Repeat(ref operand, _) => {
                let src = self.eval_operand(operand, None)?;
                assert!(!src.layout.is_unsized());
                // We need an in-memory place to index into the array elements.
                let dest = self.force_allocation(&dest)?;
                let length = dest.len(self)?;

                if length == 0 {
                    // Nothing to copy... but let's still make sure that `dest` as a place is valid.
                    self.get_alloc_mut(&dest)?;
                } else {
                    // Write the src to the first element.
                    let first = self.mplace_field(&dest, 0)?;
                    self.copy_op(&src, &first.into())?;

                    // This is performance-sensitive code for big static/const arrays! So we
                    // avoid writing each operand individually and instead just make many copies
                    // of the first element.
                    let elem_size = first.layout.size;
                    let first_ptr = first.ptr;
                    let rest_ptr = first_ptr.offset(elem_size, self)?;
                    self.memory.copy_repeatedly(
                        first_ptr,
                        first.align,
                        rest_ptr,
                        first.align,
                        elem_size,
                        length - 1,
                        /*nonoverlapping:*/ true,
                    )?;
                }
            }

            Len(place) => {
                let src = self.eval_place(place)?;
                // Force into memory so the (possibly dynamic) length is available.
                let mplace = self.force_allocation(&src)?;
                let len = mplace.len(self)?;
                self.write_scalar(Scalar::from_machine_usize(len, self), &dest)?;
            }

            AddressOf(_, place) | Ref(_, _, place) => {
                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                self.write_immediate(place.to_ref(self), &dest)?;
            }

            NullaryOp(mir::NullOp::Box, _) => {
                // Box allocation is machine-defined (the interpreter itself
                // does not know how to allocate a `Box`).
                M::box_alloc(self, &dest)?;
            }

            NullaryOp(null_op, ty) => {
                let ty = self.subst_from_current_frame_and_normalize_erasing_regions(ty);
                let layout = self.layout_of(ty)?;
                if layout.is_unsized() {
                    // FIXME: This should be a span_bug (#80742)
                    self.tcx.sess.delay_span_bug(
                        self.frame().current_span(),
                        &format!("Nullary MIR operator called for unsized type {}", ty),
                    );
                    throw_inval!(SizeOfUnsizedType(ty));
                }
                let val = match null_op {
                    mir::NullOp::SizeOf => layout.size.bytes(),
                    mir::NullOp::AlignOf => layout.align.abi.bytes(),
                    // Handled by the arm above.
                    mir::NullOp::Box => unreachable!(),
                };
                self.write_scalar(Scalar::from_machine_usize(val, self), &dest)?;
            }

            ShallowInitBox(ref operand, _) => {
                // Copies the raw pointer operand into the box destination
                // without any further initialization.
                let src = self.eval_operand(operand, None)?;
                let v = self.read_immediate(&src)?;
                self.write_immediate(*v, &dest)?;
            }

            Cast(cast_kind, ref operand, cast_ty) => {
                let src = self.eval_operand(operand, None)?;
                let cast_ty = self.subst_from_current_frame_and_normalize_erasing_regions(cast_ty);
                self.cast(&src, cast_kind, cast_ty, &dest)?;
            }

            Discriminant(place) => {
                let op = self.eval_place_to_op(place, None)?;
                let discr_val = self.read_discriminant(&op)?.0;
                self.write_scalar(discr_val, &dest)?;
            }
        }

        trace!("{:?}", self.dump_place(*dest));

        Ok(())
    }

    /// Executes the given terminator, then logs the basic block we continue in
    /// (if execution continues at all — the stack may have become empty, or the
    /// new top frame may have no cleanup code while unwinding, making `loc` `Err`).
    fn terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> InterpResult<'tcx> {
        info!("{:?}", terminator.kind);

        self.eval_terminator(terminator)?;
        if !self.stack().is_empty() {
            if let Ok(loc) = self.frame().loc {
                info!("// executing {:?}", loc.block);
            }
        }
        Ok(())
    }
}