]>
Commit | Line | Data |
---|---|---|
b7449926 XL |
1 | //! Functions concerning immediate values and operands, and reading from operands. |
2 | //! All high-level functions to read from memory work on operands as sources. | |
3 | ||
ba9703b0 XL |
4 | use std::convert::TryFrom; |
5 | use std::fmt::Write; | |
b7449926 | 6 | |
ba9703b0 XL |
7 | use rustc_errors::ErrorReported; |
8 | use rustc_hir::def::Namespace; | |
60c5eb7d | 9 | use rustc_macros::HashStable; |
c295e0f8 | 10 | use rustc_middle::ty::layout::{LayoutOf, PrimitiveExt, TyAndLayout}; |
ba9703b0 | 11 | use rustc_middle::ty::print::{FmtPrinter, PrettyPrinter, Printer}; |
f035d41b | 12 | use rustc_middle::ty::{ConstInt, Ty}; |
ba9703b0 | 13 | use rustc_middle::{mir, ty}; |
c295e0f8 | 14 | use rustc_target::abi::{Abi, HasDataLayout, Size, TagEncoding}; |
ba9703b0 XL |
15 | use rustc_target::abi::{VariantIdx, Variants}; |
16 | ||
17 | use super::{ | |
136023e0 XL |
18 | alloc_range, from_known_layout, mir_assign_valid_types, AllocId, ConstValue, GlobalId, |
19 | InterpCx, InterpResult, MPlaceTy, Machine, MemPlace, Place, PlaceTy, Pointer, Provenance, | |
20 | Scalar, ScalarMaybeUninit, | |
ba9703b0 | 21 | }; |
0bf4aa26 | 22 | |
/// An `Immediate` represents a single immediate self-contained Rust value.
///
/// For optimization of a few very common cases, there is also a representation for a pair of
/// primitive values (`ScalarPair`). It allows Miri to avoid making allocations for checked binary
/// operations and wide pointers. This idea was taken from rustc's codegen.
/// In particular, thanks to `ScalarPair`, arithmetic operations and casts can be entirely
/// defined on `Immediate`, and do not have to work with a `Place`.
#[derive(Copy, Clone, PartialEq, Eq, HashStable, Hash, Debug)]
pub enum Immediate<Tag: Provenance = AllocId> {
    /// A single (possibly uninitialized) scalar value.
    Scalar(ScalarMaybeUninit<Tag>),
    /// A pair of scalars, e.g. for wide pointers (data, meta) or checked-arithmetic
    /// results (value, overflow flag).
    ScalarPair(ScalarMaybeUninit<Tag>, ScalarMaybeUninit<Tag>),
}
35 | ||
// Compile-time size check: keep this hot type from growing accidentally.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(Immediate, 56);
38 | ||
/// Wrap a (possibly uninitialized) scalar as an `Immediate::Scalar`.
impl<Tag: Provenance> From<ScalarMaybeUninit<Tag>> for Immediate<Tag> {
    #[inline(always)]
    fn from(val: ScalarMaybeUninit<Tag>) -> Self {
        Immediate::Scalar(val)
    }
}
45 | ||
/// Wrap an initialized scalar as an `Immediate::Scalar` (via `ScalarMaybeUninit`).
impl<Tag: Provenance> From<Scalar<Tag>> for Immediate<Tag> {
    #[inline(always)]
    fn from(val: Scalar<Tag>) -> Self {
        Immediate::Scalar(val.into())
    }
}
9fa01778 | 52 | |
impl<'tcx, Tag: Provenance> Immediate<Tag> {
    /// Create a scalar immediate from a pointer with definite provenance.
    pub fn from_pointer(p: Pointer<Tag>, cx: &impl HasDataLayout) -> Self {
        Immediate::Scalar(ScalarMaybeUninit::from_pointer(p, cx))
    }

    /// Create a scalar immediate from a pointer that may lack provenance.
    pub fn from_maybe_pointer(p: Pointer<Option<Tag>>, cx: &impl HasDataLayout) -> Self {
        Immediate::Scalar(ScalarMaybeUninit::from_maybe_pointer(p, cx))
    }

    /// Build the wide-pointer representation of a slice: (data pointer, length).
    pub fn new_slice(val: Scalar<Tag>, len: u64, cx: &impl HasDataLayout) -> Self {
        Immediate::ScalarPair(val.into(), Scalar::from_machine_usize(len, cx).into())
    }

    /// Build the wide-pointer representation of a trait object: (data pointer, vtable pointer).
    pub fn new_dyn_trait(
        val: Scalar<Tag>,
        vtable: Pointer<Option<Tag>>,
        cx: &impl HasDataLayout,
    ) -> Self {
        Immediate::ScalarPair(val.into(), ScalarMaybeUninit::from_maybe_pointer(vtable, cx))
    }

    /// Extract the single scalar, which may still be uninitialized.
    /// ICEs (via `bug!`) if this is a `ScalarPair`.
    #[inline]
    pub fn to_scalar_or_uninit(self) -> ScalarMaybeUninit<Tag> {
        match self {
            Immediate::Scalar(val) => val,
            Immediate::ScalarPair(..) => bug!("Got a scalar pair where a scalar was expected"),
        }
    }

    /// Extract the single scalar, erroring if it is uninitialized.
    /// ICEs if this is a `ScalarPair`.
    #[inline]
    pub fn to_scalar(self) -> InterpResult<'tcx, Scalar<Tag>> {
        self.to_scalar_or_uninit().check_init()
    }

    /// Extract both scalars of a pair, erroring if either is uninitialized.
    /// ICEs if this is a single `Scalar`.
    #[inline]
    pub fn to_scalar_pair(self) -> InterpResult<'tcx, (Scalar<Tag>, Scalar<Tag>)> {
        match self {
            Immediate::ScalarPair(val1, val2) => Ok((val1.check_init()?, val2.check_init()?)),
            Immediate::Scalar(..) => {
                bug!("Got a scalar where a scalar pair was expected")
            }
        }
    }
}
97 | ||
// ScalarPair needs a type to interpret, so we often have an immediate and a type together
// as input for binary and cast operations.
#[derive(Copy, Clone, Debug)]
pub struct ImmTy<'tcx, Tag: Provenance = AllocId> {
    // Kept private so that all construction goes through the checked constructors below.
    imm: Immediate<Tag>,
    pub layout: TyAndLayout<'tcx>,
}
105 | ||
// Compile-time size check: keep this hot type from growing accidentally.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(ImmTy<'_>, 72);
108 | ||
impl<Tag: Provenance> std::fmt::Display for ImmTy<'_, Tag> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        /// Helper function for printing a scalar to a FmtPrinter
        fn p<'a, 'tcx, F: std::fmt::Write, Tag: Provenance>(
            cx: FmtPrinter<'a, 'tcx, F>,
            s: ScalarMaybeUninit<Tag>,
            ty: Ty<'tcx>,
        ) -> Result<FmtPrinter<'a, 'tcx, F>, std::fmt::Error> {
            match s {
                ScalarMaybeUninit::Scalar(Scalar::Int(int)) => {
                    cx.pretty_print_const_scalar_int(int, ty, true)
                }
                ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _sz)) => {
                    // Just print the ptr value. `pretty_print_const_scalar_ptr` would also try to
                    // print what it points to, which would fail since it has no access to the local
                    // memory.
                    cx.pretty_print_const_pointer(ptr, ty, true)
                }
                ScalarMaybeUninit::Uninit => cx.typed_value(
                    |mut this| {
                        this.write_str("uninit ")?;
                        Ok(this)
                    },
                    |this| this.print_type(ty),
                    " ",
                ),
            }
        }
        ty::tls::with(|tcx| {
            match self.imm {
                Immediate::Scalar(s) => {
                    // Pretty-printing needs a `TyCtxt`-lifted type; fall back to plain
                    // formatting when lifting fails.
                    if let Some(ty) = tcx.lift(self.layout.ty) {
                        let cx = FmtPrinter::new(tcx, f, Namespace::ValueNS);
                        p(cx, s, ty)?;
                        return Ok(());
                    }
                    write!(f, "{}: {}", s, self.layout.ty)
                }
                Immediate::ScalarPair(a, b) => {
                    // FIXME(oli-obk): at least print tuples and slices nicely
                    write!(f, "({}, {}): {}", a, b, self.layout.ty,)
                }
            }
        })
    }
}
155 | ||
/// `ImmTy` dereferences to its (private) immediate, giving read-only access.
impl<'tcx, Tag: Provenance> std::ops::Deref for ImmTy<'tcx, Tag> {
    type Target = Immediate<Tag>;
    #[inline(always)]
    fn deref(&self) -> &Immediate<Tag> {
        &self.imm
    }
}
163 | ||
/// An `Operand` is the result of computing a `mir::Operand`. It can be immediate,
/// or still in memory. The latter is an optimization, to delay reading that chunk of
/// memory and to avoid having to store arbitrary-sized data here.
#[derive(Copy, Clone, PartialEq, Eq, HashStable, Hash, Debug)]
pub enum Operand<Tag: Provenance = AllocId> {
    /// The value is available directly, without touching memory.
    Immediate(Immediate<Tag>),
    /// The value still lives in memory at the given place.
    Indirect(MemPlace<Tag>),
}
172 | ||
/// An operand paired with its type and layout, analogous to `ImmTy`.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct OpTy<'tcx, Tag: Provenance = AllocId> {
    op: Operand<Tag>, // Keep this private; it helps enforce invariants.
    pub layout: TyAndLayout<'tcx>,
}
178 | ||
// Compile-time size check: keep this hot type from growing accidentally.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(OpTy<'_>, 80);
6a06907d | 181 | |
/// `OpTy` dereferences to its (private) operand, giving read-only access.
impl<'tcx, Tag: Provenance> std::ops::Deref for OpTy<'tcx, Tag> {
    type Target = Operand<Tag>;
    #[inline(always)]
    fn deref(&self) -> &Operand<Tag> {
        &self.op
    }
}
189 | ||
/// Every memory place can be used as an (indirect) operand.
impl<'tcx, Tag: Provenance> From<MPlaceTy<'tcx, Tag>> for OpTy<'tcx, Tag> {
    #[inline(always)]
    fn from(mplace: MPlaceTy<'tcx, Tag>) -> Self {
        OpTy { op: Operand::Indirect(*mplace), layout: mplace.layout }
    }
}
196 | ||
/// Convenience: build an operand from a borrowed memory place (`MPlaceTy` is `Copy`).
impl<'tcx, Tag: Provenance> From<&'_ MPlaceTy<'tcx, Tag>> for OpTy<'tcx, Tag> {
    #[inline(always)]
    fn from(mplace: &MPlaceTy<'tcx, Tag>) -> Self {
        OpTy { op: Operand::Indirect(**mplace), layout: mplace.layout }
    }
}
203 | ||
/// Every typed immediate can be used as an (immediate) operand.
impl<'tcx, Tag: Provenance> From<ImmTy<'tcx, Tag>> for OpTy<'tcx, Tag> {
    #[inline(always)]
    fn from(val: ImmTy<'tcx, Tag>) -> Self {
        OpTy { op: Operand::Immediate(val.imm), layout: val.layout }
    }
}
210 | ||
impl<'tcx, Tag: Provenance> ImmTy<'tcx, Tag> {
    /// Pair a single scalar with its layout.
    #[inline]
    pub fn from_scalar(val: Scalar<Tag>, layout: TyAndLayout<'tcx>) -> Self {
        ImmTy { imm: val.into(), layout }
    }

    /// Pair an immediate with its layout.
    #[inline]
    pub fn from_immediate(imm: Immediate<Tag>, layout: TyAndLayout<'tcx>) -> Self {
        ImmTy { imm, layout }
    }

    /// Build an integer immediate from `i`; `None` if `i` does not fit in `layout.size`.
    #[inline]
    pub fn try_from_uint(i: impl Into<u128>, layout: TyAndLayout<'tcx>) -> Option<Self> {
        Some(Self::from_scalar(Scalar::try_from_uint(i, layout.size)?, layout))
    }
    /// Build an integer immediate from `i`; panics if it does not fit in `layout.size`.
    #[inline]
    pub fn from_uint(i: impl Into<u128>, layout: TyAndLayout<'tcx>) -> Self {
        Self::from_scalar(Scalar::from_uint(i, layout.size), layout)
    }

    /// Signed counterpart of `try_from_uint`.
    #[inline]
    pub fn try_from_int(i: impl Into<i128>, layout: TyAndLayout<'tcx>) -> Option<Self> {
        Some(Self::from_scalar(Scalar::try_from_int(i, layout.size)?, layout))
    }

    /// Signed counterpart of `from_uint`.
    #[inline]
    pub fn from_int(i: impl Into<i128>, layout: TyAndLayout<'tcx>) -> Self {
        Self::from_scalar(Scalar::from_int(i, layout.size), layout)
    }

    /// Convert to a `ConstInt`; asserts the type is integral and the value a scalar int.
    #[inline]
    pub fn to_const_int(self) -> ConstInt {
        assert!(self.layout.ty.is_integral());
        let int = self.to_scalar().expect("to_const_int doesn't work on scalar pairs").assert_int();
        ConstInt::new(int, self.layout.ty.is_signed(), self.layout.ty.is_ptr_sized_integral())
    }
}
248 | ||
ba9703b0 | 249 | impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { |
    /// Try reading an immediate in memory; this is interesting particularly for `ScalarPair`.
    /// Returns `None` if the layout does not permit loading this as a value.
    fn try_read_immediate_from_mplace(
        &self,
        mplace: &MPlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, Option<ImmTy<'tcx, M::PointerTag>>> {
        if mplace.layout.is_unsized() {
            // Don't touch unsized
            return Ok(None);
        }

        let alloc = match self.get_alloc(mplace)? {
            Some(ptr) => ptr,
            None => {
                // No backing allocation means this is a ZST; use the canonical ZST scalar.
                return Ok(Some(ImmTy {
                    // zero-sized type
                    imm: Scalar::ZST.into(),
                    layout: mplace.layout,
                }));
            }
        };

        // Only `Scalar` and `ScalarPair` ABIs can be represented as an `Immediate`.
        match mplace.layout.abi {
            Abi::Scalar(..) => {
                let scalar = alloc.read_scalar(alloc_range(Size::ZERO, mplace.layout.size))?;
                Ok(Some(ImmTy { imm: scalar.into(), layout: mplace.layout }))
            }
            Abi::ScalarPair(a, b) => {
                // We checked `ptr_align` above, so all fields will have the alignment they need.
                // We would anyway check against `ptr_align.restrict_for_offset(b_offset)`,
                // which `ptr.offset(b_offset)` cannot possibly fail to satisfy.
                let (a, b) = (a.value, b.value);
                let (a_size, b_size) = (a.size(self), b.size(self));
                // The second field starts at the first offset past `a` that satisfies `b`'s alignment.
                let b_offset = a_size.align_to(b.align(self).abi);
                assert!(b_offset.bytes() > 0); // we later use the offset to tell apart the fields
                let a_val = alloc.read_scalar(alloc_range(Size::ZERO, a_size))?;
                let b_val = alloc.read_scalar(alloc_range(b_offset, b_size))?;
                Ok(Some(ImmTy { imm: Immediate::ScalarPair(a_val, b_val), layout: mplace.layout }))
            }
            _ => Ok(None),
        }
    }
292 | ||
    /// Try returning an immediate for the operand.
    /// If the layout does not permit loading this as an immediate, return where in memory
    /// we can find the data.
    /// Note that for a given layout, this operation will either always fail or always
    /// succeed! Whether it succeeds depends on whether the layout can be represented
    /// in an `Immediate`, not on which data is stored there currently.
    pub fn try_read_immediate(
        &self,
        src: &OpTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, Result<ImmTy<'tcx, M::PointerTag>, MPlaceTy<'tcx, M::PointerTag>>> {
        Ok(match src.try_as_mplace() {
            Ok(ref mplace) => {
                // Operand lives in memory: attempt the actual load.
                if let Some(val) = self.try_read_immediate_from_mplace(mplace)? {
                    Ok(val)
                } else {
                    Err(*mplace)
                }
            }
            // Already an immediate: nothing to read.
            Err(val) => Ok(val),
        })
    }
314 | ||
a1dfa0c6 | 315 | /// Read an immediate from a place, asserting that that is possible with the given layout. |
b7449926 | 316 | #[inline(always)] |
a1dfa0c6 | 317 | pub fn read_immediate( |
0bf4aa26 | 318 | &self, |
6a06907d | 319 | op: &OpTy<'tcx, M::PointerTag>, |
dc9dc135 | 320 | ) -> InterpResult<'tcx, ImmTy<'tcx, M::PointerTag>> { |
9fa01778 | 321 | if let Ok(imm) = self.try_read_immediate(op)? { |
dc9dc135 | 322 | Ok(imm) |
b7449926 | 323 | } else { |
f035d41b | 324 | span_bug!(self.cur_span(), "primitive read failed for type: {:?}", op.layout.ty); |
b7449926 XL |
325 | } |
326 | } | |
327 | ||
328 | /// Read a scalar from a place | |
0bf4aa26 XL |
329 | pub fn read_scalar( |
330 | &self, | |
6a06907d | 331 | op: &OpTy<'tcx, M::PointerTag>, |
f9f354fc | 332 | ) -> InterpResult<'tcx, ScalarMaybeUninit<M::PointerTag>> { |
3dfed10e | 333 | Ok(self.read_immediate(op)?.to_scalar_or_uninit()) |
b7449926 XL |
334 | } |
335 | ||
136023e0 XL |
336 | /// Read a pointer from a place. |
337 | pub fn read_pointer( | |
338 | &self, | |
339 | op: &OpTy<'tcx, M::PointerTag>, | |
340 | ) -> InterpResult<'tcx, Pointer<Option<M::PointerTag>>> { | |
341 | Ok(self.scalar_to_ptr(self.read_scalar(op)?.check_init()?)) | |
342 | } | |
343 | ||
    // Turn the wide MPlace into a string (must already be dereferenced!)
    pub fn read_str(&self, mplace: &MPlaceTy<'tcx, M::PointerTag>) -> InterpResult<'tcx, &str> {
        // The slice metadata gives the byte length; read exactly that many bytes.
        let len = mplace.len(self)?;
        let bytes = self.memory.read_bytes(mplace.ptr, Size::from_bytes(len))?;
        // Invalid UTF-8 is undefined behavior for a `str`, hence `err_ub!`.
        let str = std::str::from_utf8(bytes).map_err(|err| err_ub!(InvalidStr(err)))?;
        Ok(str)
    }
351 | ||
    /// Projection functions
    pub fn operand_field(
        &self,
        op: &OpTy<'tcx, M::PointerTag>,
        field: usize,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        let base = match op.try_as_mplace() {
            Ok(ref mplace) => {
                // We can reuse the mplace field computation logic for indirect operands.
                let field = self.mplace_field(mplace, field)?;
                return Ok(field.into());
            }
            Err(value) => value,
        };

        let field_layout = op.layout.field(self, field);
        if field_layout.is_zst() {
            // ZST fields need no data; synthesize the canonical ZST immediate.
            let immediate = Scalar::ZST.into();
            return Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout });
        }
        let offset = op.layout.fields.offset(field);
        let immediate = match *base {
            // the field covers the entire type
            _ if offset.bytes() == 0 && field_layout.size == op.layout.size => *base,
            // extract fields from types with `ScalarPair` ABI
            Immediate::ScalarPair(a, b) => {
                // Offset 0 selects the first scalar; any other offset must be the second.
                let val = if offset.bytes() == 0 { a } else { b };
                Immediate::from(val)
            }
            Immediate::Scalar(val) => span_bug!(
                self.cur_span(),
                "field access on non aggregate {:#?}, {:#?}",
                val,
                op.layout
            ),
        };
        Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout })
    }
390 | ||
ba9703b0 XL |
391 | pub fn operand_index( |
392 | &self, | |
6a06907d | 393 | op: &OpTy<'tcx, M::PointerTag>, |
ba9703b0 XL |
394 | index: u64, |
395 | ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> { | |
396 | if let Ok(index) = usize::try_from(index) { | |
397 | // We can just treat this as a field. | |
398 | self.operand_field(op, index) | |
399 | } else { | |
400 | // Indexing into a big array. This must be an mplace. | |
136023e0 | 401 | let mplace = op.assert_mem_place(); |
6a06907d | 402 | Ok(self.mplace_index(&mplace, index)?.into()) |
ba9703b0 XL |
403 | } |
404 | } | |
405 | ||
    /// Project to enum variant `variant` of the operand.
    pub fn operand_downcast(
        &self,
        op: &OpTy<'tcx, M::PointerTag>,
        variant: VariantIdx,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        // Downcasts only change the layout
        Ok(match op.try_as_mplace() {
            Ok(ref mplace) => self.mplace_downcast(mplace, variant)?.into(),
            Err(..) => {
                // Immediate operand: keep the data, swap in the variant's layout.
                let layout = op.layout.for_variant(self, variant);
                OpTy { layout, ..*op }
            }
        })
    }
420 | ||
    /// Apply a single MIR projection element to the operand.
    pub fn operand_projection(
        &self,
        base: &OpTy<'tcx, M::PointerTag>,
        proj_elem: mir::PlaceElem<'tcx>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        use rustc_middle::mir::ProjectionElem::*;
        Ok(match proj_elem {
            Field(field, _) => self.operand_field(base, field.index())?,
            Downcast(_, variant) => self.operand_downcast(base, variant)?,
            Deref => self.deref_operand(base)?.into(),
            Subslice { .. } | ConstantIndex { .. } | Index(_) => {
                // The rest should only occur as mplace, we do not use Immediates for types
                // allowing such operations. This matches place_projection forcing an allocation.
                let mplace = base.assert_mem_place();
                self.mplace_projection(&mplace, proj_elem)?.into()
            }
        })
    }
439 | ||
    /// Converts a repr(simd) operand into an operand where `place_index` accesses the SIMD elements.
    /// Also returns the number of elements.
    pub fn operand_to_simd(
        &self,
        base: &OpTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, (MPlaceTy<'tcx, M::PointerTag>, u64)> {
        // Basically we just transmute this place into an array following simd_size_and_type.
        // This only works in memory, but repr(simd) types should never be immediates anyway.
        assert!(base.layout.ty.is_simd());
        self.mplace_to_simd(&base.assert_mem_place())
    }
451 | ||
    /// Read from a local. Will not actually access the local if reading from a ZST.
    /// Will not access memory, instead an indirect `Operand` is returned.
    ///
    /// This is public because it is used by [priroda](https://github.com/oli-obk/priroda) to get an
    /// OpTy from a local
    pub fn access_local(
        &self,
        frame: &super::Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>,
        local: mir::Local,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        let layout = self.layout_of_local(frame, local, layout)?;
        let op = if layout.is_zst() {
            // Do not read from ZST, they might not be initialized
            Operand::Immediate(Scalar::ZST.into())
        } else {
            // Let the machine hook observe/override the access.
            M::access_local(&self, frame, local)?
        };
        Ok(OpTy { op, layout })
    }
472 | ||
    /// Every place can be read from, so we can turn them into an operand.
    /// This will definitely return `Indirect` if the place is a `Ptr`, i.e., this
    /// will never actually read from memory.
    #[inline(always)]
    pub fn place_to_op(
        &self,
        place: &PlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        let op = match **place {
            Place::Ptr(mplace) => Operand::Indirect(mplace),
            Place::Local { frame, local } => {
                *self.access_local(&self.stack()[frame], local, None)?
            }
        };
        Ok(OpTy { op, layout: place.layout })
    }
489 | ||
    // Evaluate a place with the goal of reading from it. This lets us sometimes
    // avoid allocations.
    pub fn eval_place_to_op(
        &self,
        place: mir::Place<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        // Do not use the layout passed in as argument if the base we are looking at
        // here is not the entire place.
        let layout = if place.projection.is_empty() { layout } else { None };

        let base_op = self.access_local(self.frame(), place.local, layout)?;

        // Apply the projections one by one, threading the operand through.
        let op = place
            .projection
            .iter()
            .try_fold(base_op, |op, elem| self.operand_projection(&op, elem))?;

        trace!("eval_place_to_op: got {:?}", *op);
        // Sanity-check the type we ended up with.
        debug_assert!(mir_assign_valid_types(
            *self.tcx,
            self.param_env,
            self.layout_of(self.subst_from_current_frame_and_normalize_erasing_regions(
                place.ty(&self.frame().body.local_decls, *self.tcx).ty
            )?)?,
            op.layout,
        ));
        Ok(op)
    }
520 | ||
    /// Evaluate the operand, returning a place where you can then find the data.
    /// If you already know the layout, you can save two table lookups
    /// by passing it in here.
    #[inline]
    pub fn eval_operand(
        &self,
        mir_op: &mir::Operand<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        use rustc_middle::mir::Operand::*;
        let op = match *mir_op {
            // FIXME: do some more logic on `move` to invalidate the old location
            Copy(place) | Move(place) => self.eval_place_to_op(place, layout)?,

            Constant(ref constant) => {
                let val =
                    self.subst_from_current_frame_and_normalize_erasing_regions(constant.literal)?;
                // This can still fail:
                // * During ConstProp, with `TooGeneric` or since the `required_consts` were not all
                //   checked yet.
                // * During CTFE, since promoteds in `const`/`static` initializer bodies can fail.

                self.mir_const_to_op(&val, layout)?
            }
        };
        trace!("{:?}: {:?}", mir_op, *op);
        Ok(op)
    }
549 | ||
550 | /// Evaluate a bunch of operands at once | |
551 | pub(super) fn eval_operands( | |
552 | &self, | |
553 | ops: &[mir::Operand<'tcx>], | |
dc9dc135 | 554 | ) -> InterpResult<'tcx, Vec<OpTy<'tcx, M::PointerTag>>> { |
74b04a01 | 555 | ops.iter().map(|op| self.eval_operand(op, None)).collect() |
b7449926 XL |
556 | } |
557 | ||
    // Used when the miri-engine runs into a constant and for extracting information from constants
    // in patterns via the `const_eval` module
    /// The `val` and `layout` are assumed to already be in our interpreter
    /// "universe" (param_env).
    pub fn const_to_op(
        &self,
        val: ty::Const<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        match val.val() {
            // Still-generic constants cannot be evaluated here.
            ty::ConstKind::Param(_) | ty::ConstKind::Bound(..) => throw_inval!(TooGeneric),
            ty::ConstKind::Error(_) => throw_inval!(AlreadyReported(ErrorReported)),
            ty::ConstKind::Unevaluated(uv) => {
                // Resolve and evaluate the constant to a memory allocation.
                let instance = self.resolve(uv.def, uv.substs)?;
                Ok(self.eval_to_allocation(GlobalId { instance, promoted: uv.promoted })?.into())
            }
            ty::ConstKind::Infer(..) | ty::ConstKind::Placeholder(..) => {
                span_bug!(self.cur_span(), "const_to_op: Unexpected ConstKind {:?}", val)
            }
            ty::ConstKind::Value(val_val) => self.const_val_to_op(val_val, val.ty(), layout),
        }
    }
580 | ||
    /// Like `const_to_op`, but for MIR constants, which may carry either a
    /// `ty::Const` or an already-evaluated `ConstValue`.
    pub fn mir_const_to_op(
        &self,
        val: &mir::ConstantKind<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        match val {
            mir::ConstantKind::Ty(ct) => self.const_to_op(*ct, layout),
            mir::ConstantKind::Val(val, ty) => self.const_val_to_op(*val, *ty, layout),
        }
    }
591 | ||
    /// Turn an already-evaluated `ConstValue` into an operand, importing its
    /// pointers into this interpreter instance via `global_base_pointer`.
    crate fn const_val_to_op(
        &self,
        val_val: ConstValue<'tcx>,
        ty: Ty<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        // Other cases need layout.
        let tag_scalar = |scalar| -> InterpResult<'tcx, _> {
            Ok(match scalar {
                Scalar::Ptr(ptr, size) => Scalar::Ptr(self.global_base_pointer(ptr)?, size),
                Scalar::Int(int) => Scalar::Int(int),
            })
        };
        let layout = from_known_layout(self.tcx, self.param_env, layout, || self.layout_of(ty))?;
        let op = match val_val {
            ConstValue::ByRef { alloc, offset } => {
                let id = self.tcx.create_memory_alloc(alloc);
                // We rely on mutability being set correctly in that allocation to prevent writes
                // where none should happen.
                let ptr = self.global_base_pointer(Pointer::new(id, offset))?;
                Operand::Indirect(MemPlace::from_ptr(ptr.into(), layout.align.abi))
            }
            ConstValue::Scalar(x) => Operand::Immediate(tag_scalar(x)?.into()),
            ConstValue::Slice { data, start, end } => {
                // We rely on mutability being set correctly in `data` to prevent writes
                // where none should happen.
                let ptr = Pointer::new(
                    self.tcx.create_memory_alloc(data),
                    Size::from_bytes(start), // offset: `start`
                );
                Operand::Immediate(Immediate::new_slice(
                    Scalar::from_pointer(self.global_base_pointer(ptr)?, &*self.tcx),
                    u64::try_from(end.checked_sub(start).unwrap()).unwrap(), // len: `end - start`
                    self,
                ))
            }
        };
        Ok(OpTy { op, layout })
    }
b7449926 XL |
    /// Read discriminant, return the runtime value as well as the variant index.
    /// Can also legally be called on non-enums (e.g. through the discriminant_value intrinsic)!
    pub fn read_discriminant(
        &self,
        op: &OpTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, (Scalar<M::PointerTag>, VariantIdx)> {
        trace!("read_discriminant_value {:#?}", op.layout);
        // Get type and layout of the discriminant.
        let discr_layout = self.layout_of(op.layout.ty.discriminant_ty(*self.tcx))?;
        trace!("discriminant type: {:?}", discr_layout.ty);

        // We use "discriminant" to refer to the value associated with a particular enum variant.
        // This is not to be confused with its "variant index", which is just determining its position in the
        // declared list of variants -- they can differ with explicitly assigned discriminants.
        // We use "tag" to refer to how the discriminant is encoded in memory, which can be either
        // straight-forward (`TagEncoding::Direct`) or with a niche (`TagEncoding::Niche`).
        let (tag_scalar_layout, tag_encoding, tag_field) = match op.layout.variants {
            Variants::Single { index } => {
                // Only one variant: no tag is stored in memory, so the discriminant
                // is fully determined by the layout and we can return early.
                let discr = match op.layout.ty.discriminant_for_variant(*self.tcx, index) {
                    Some(discr) => {
                        // This type actually has discriminants.
                        assert_eq!(discr.ty, discr_layout.ty);
                        Scalar::from_uint(discr.val, discr_layout.size)
                    }
                    None => {
                        // On a type without actual discriminants, variant is 0.
                        assert_eq!(index.as_u32(), 0);
                        Scalar::from_uint(index.as_u32(), discr_layout.size)
                    }
                };
                return Ok((discr, index));
            }
            Variants::Multiple { tag, ref tag_encoding, tag_field, .. } => {
                (tag, tag_encoding, tag_field)
            }
        };

        // There are *three* layouts that come into play here:
        // - The discriminant has a type for typechecking. This is `discr_layout`, and is used for
        //   the `Scalar` we return.
        // - The tag (encoded discriminant) has layout `tag_layout`. This is always an integer type,
        //   and used to interpret the value we read from the tag field.
        //   For the return value, a cast to `discr_layout` is performed.
        // - The field storing the tag has a layout, which is very similar to `tag_layout` but
        //   may be a pointer. This is `tag_val.layout`; we just use it for sanity checks.

        // Get layout for tag.
        let tag_layout = self.layout_of(tag_scalar_layout.value.to_int_ty(*self.tcx))?;

        // Read tag and sanity-check `tag_layout`.
        let tag_val = self.read_immediate(&self.operand_field(op, tag_field)?)?;
        assert_eq!(tag_layout.size, tag_val.layout.size);
        assert_eq!(tag_layout.abi.is_signed(), tag_val.layout.abi.is_signed());
        let tag_val = tag_val.to_scalar()?;
        trace!("tag value: {:?}", tag_val);

        // Figure out which discriminant and variant this corresponds to.
        Ok(match *tag_encoding {
            TagEncoding::Direct => {
                // The tag *is* the discriminant; a non-integer (pointer) tag is UB here.
                let tag_bits = tag_val
                    .try_to_int()
                    .map_err(|dbg_val| err_ub!(InvalidTag(dbg_val)))?
                    .assert_bits(tag_layout.size);
                // Cast bits from tag layout to discriminant layout.
                let discr_val = self.cast_from_scalar(tag_bits, tag_layout, discr_layout.ty);
                let discr_bits = discr_val.assert_bits(discr_layout.size);
                // Convert discriminant to variant index, and catch invalid discriminants.
                let index = match *op.layout.ty.kind() {
                    ty::Adt(adt, _) => {
                        adt.discriminants(*self.tcx).find(|(_, var)| var.val == discr_bits)
                    }
                    ty::Generator(def_id, substs, _) => {
                        let substs = substs.as_generator();
                        substs
                            .discriminants(def_id, *self.tcx)
                            .find(|(_, var)| var.val == discr_bits)
                    }
                    _ => span_bug!(self.cur_span(), "tagged layout for non-adt non-generator"),
                }
                // No variant with this discriminant: the tag is invalid (UB).
                .ok_or_else(|| err_ub!(InvalidTag(Scalar::from_uint(tag_bits, tag_layout.size))))?;
                // Return the cast value, and the index.
                (discr_val, index.0)
            }
            TagEncoding::Niche { dataful_variant, ref niche_variants, niche_start } => {
                // Compute the variant this niche value/"tag" corresponds to. With niche layout,
                // discriminant (encoded in niche/tag) and variant index are the same.
                let variants_start = niche_variants.start().as_u32();
                let variants_end = niche_variants.end().as_u32();
                let variant = match tag_val.try_to_int() {
                    Err(dbg_val) => {
                        // So this is a pointer then, and casting to an int failed.
                        // Can only happen during CTFE.
                        let ptr = self.scalar_to_ptr(tag_val);
                        // The niche must be just 0, and the ptr not null, then we know this is
                        // okay. Everything else, we conservatively reject.
                        let ptr_valid = niche_start == 0
                            && variants_start == variants_end
                            && !self.memory.ptr_may_be_null(ptr);
                        if !ptr_valid {
                            throw_ub!(InvalidTag(dbg_val))
                        }
                        dataful_variant
                    }
                    Ok(tag_bits) => {
                        let tag_bits = tag_bits.assert_bits(tag_layout.size);
                        // We need to use machine arithmetic to get the relative variant idx:
                        // variant_index_relative = tag_val - niche_start_val
                        // (`binary_op` handles wrapping in the tag's integer type.)
                        let tag_val = ImmTy::from_uint(tag_bits, tag_layout);
                        let niche_start_val = ImmTy::from_uint(niche_start, tag_layout);
                        let variant_index_relative_val =
                            self.binary_op(mir::BinOp::Sub, &tag_val, &niche_start_val)?;
                        let variant_index_relative = variant_index_relative_val
                            .to_scalar()?
                            .assert_bits(tag_val.layout.size);
                        // Check if this is in the range that indicates an actual discriminant.
                        if variant_index_relative <= u128::from(variants_end - variants_start) {
                            let variant_index_relative = u32::try_from(variant_index_relative)
                                .expect("we checked that this fits into a u32");
                            // Then computing the absolute variant idx should not overflow any more.
                            let variant_index = variants_start
                                .checked_add(variant_index_relative)
                                .expect("overflow computing absolute variant idx");
                            // Sanity-check the index against the number of declared variants.
                            let variants_len = op
                                .layout
                                .ty
                                .ty_adt_def()
                                .expect("tagged layout for non adt")
                                .variants
                                .len();
                            assert!(usize::try_from(variant_index).unwrap() < variants_len);
                            VariantIdx::from_u32(variant_index)
                        } else {
                            // Out-of-range niche values encode the dataful variant.
                            dataful_variant
                        }
                    }
                };
                // Compute the size of the scalar we need to return.
                // No need to cast, because the variant index directly serves as discriminant and is
                // encoded in the tag.
                (Scalar::from_uint(variant.as_u32(), discr_layout.size), variant)
            }
        })
    }
b7449926 | 775 | } |