// src/librustc_mir/interpret/operand.rs (upstream rustc 1.44.1+dfsg1)

//! Functions concerning immediate values and operands, and reading from operands.
//! All high-level functions to read from memory work on operands as sources.

use std::convert::TryFrom;
use std::fmt::Write;

use rustc_errors::ErrorReported;
use rustc_hir::def::Namespace;
use rustc_macros::HashStable;
use rustc_middle::ty::layout::{IntegerExt, PrimitiveExt, TyAndLayout};
use rustc_middle::ty::print::{FmtPrinter, PrettyPrinter, Printer};
use rustc_middle::ty::Ty;
use rustc_middle::{mir, ty};
use rustc_target::abi::{Abi, DiscriminantKind, HasDataLayout, Integer, LayoutOf, Size};
use rustc_target::abi::{VariantIdx, Variants};

use super::{
    from_known_layout, sign_extend, truncate, AllocId, ConstValue, GlobalId, InterpCx,
    InterpResult, MPlaceTy, Machine, MemPlace, Place, PlaceTy, Pointer, Scalar, ScalarMaybeUndef,
};

/// An `Immediate` represents a single immediate self-contained Rust value.
///
/// For optimization of a few very common cases, there is also a representation for a pair of
/// primitive values (`ScalarPair`). It allows Miri to avoid making allocations for checked binary
/// operations and wide pointers. This idea was taken from rustc's codegen.
/// In particular, thanks to `ScalarPair`, arithmetic operations and casts can be entirely
/// defined on `Immediate`, and do not have to work with a `Place`.
#[derive(Copy, Clone, Debug, PartialEq, Eq, HashStable, Hash)]
pub enum Immediate<Tag = (), Id = AllocId> {
    Scalar(ScalarMaybeUndef<Tag, Id>),
    ScalarPair(ScalarMaybeUndef<Tag, Id>, ScalarMaybeUndef<Tag, Id>),
}
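
// Illustrative note (editorial, not in the upstream file): a wide pointer such
// as `&[u8]` fits in `Immediate::ScalarPair(data_ptr, len)`, and the result of
// a checked MIR addition, a `(sum, overflowed)` pair, fits in
// `Immediate::ScalarPair(sum, overflow_flag)`; neither needs a memory
// allocation.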

impl<Tag> From<ScalarMaybeUndef<Tag>> for Immediate<Tag> {
    #[inline(always)]
    fn from(val: ScalarMaybeUndef<Tag>) -> Self {
        Immediate::Scalar(val)
    }
}

impl<Tag> From<Scalar<Tag>> for Immediate<Tag> {
    #[inline(always)]
    fn from(val: Scalar<Tag>) -> Self {
        Immediate::Scalar(val.into())
    }
}

impl<Tag> From<Pointer<Tag>> for Immediate<Tag> {
    #[inline(always)]
    fn from(val: Pointer<Tag>) -> Self {
        Immediate::Scalar(Scalar::from(val).into())
    }
}

impl<'tcx, Tag> Immediate<Tag> {
    pub fn new_slice(val: Scalar<Tag>, len: u64, cx: &impl HasDataLayout) -> Self {
        Immediate::ScalarPair(val.into(), Scalar::from_machine_usize(len, cx).into())
    }

    pub fn new_dyn_trait(val: Scalar<Tag>, vtable: Pointer<Tag>) -> Self {
        Immediate::ScalarPair(val.into(), vtable.into())
    }

    #[inline]
    pub fn to_scalar_or_undef(self) -> ScalarMaybeUndef<Tag> {
        match self {
            Immediate::Scalar(val) => val,
            Immediate::ScalarPair(..) => bug!("Got a wide pointer where a scalar was expected"),
        }
    }

    #[inline]
    pub fn to_scalar(self) -> InterpResult<'tcx, Scalar<Tag>> {
        self.to_scalar_or_undef().not_undef()
    }

    #[inline]
    pub fn to_scalar_pair(self) -> InterpResult<'tcx, (Scalar<Tag>, Scalar<Tag>)> {
        match self {
            Immediate::Scalar(..) => bug!("Got a thin pointer where a scalar pair was expected"),
            Immediate::ScalarPair(a, b) => Ok((a.not_undef()?, b.not_undef()?)),
        }
    }
}
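
// Editorial sketch (hypothetical values, not upstream code): `new_slice` builds
// the immediate for a slice reference by pairing the data pointer with its
// length, e.g. `Immediate::new_slice(ptr, 2, cx)` for a 2-byte `&str`, while
// `new_dyn_trait` pairs a data pointer with its vtable pointer for a
// `&dyn Trait` value.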

// ScalarPair needs a type to interpret, so we often have an immediate and a type together
// as input for binary and cast operations.
#[derive(Copy, Clone, Debug)]
pub struct ImmTy<'tcx, Tag = ()> {
    imm: Immediate<Tag>,
    pub layout: TyAndLayout<'tcx>,
}

impl<Tag: Copy> std::fmt::Display for ImmTy<'tcx, Tag> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        /// Helper function for printing a scalar to a FmtPrinter
        fn p<'a, 'tcx, F: std::fmt::Write, Tag>(
            cx: FmtPrinter<'a, 'tcx, F>,
            s: ScalarMaybeUndef<Tag>,
            ty: Ty<'tcx>,
        ) -> Result<FmtPrinter<'a, 'tcx, F>, std::fmt::Error> {
            match s {
                ScalarMaybeUndef::Scalar(s) => {
                    cx.pretty_print_const_scalar(s.erase_tag(), ty, true)
                }
                ScalarMaybeUndef::Undef => cx.typed_value(
                    |mut this| {
                        this.write_str("{undef ")?;
                        Ok(this)
                    },
                    |this| this.print_type(ty),
                    " ",
                ),
            }
        }
        ty::tls::with(|tcx| {
            match self.imm {
                Immediate::Scalar(s) => {
                    if let Some(ty) = tcx.lift(&self.layout.ty) {
                        let cx = FmtPrinter::new(tcx, f, Namespace::ValueNS);
                        p(cx, s, ty)?;
                        return Ok(());
                    }
                    write!(f, "{:?}: {}", s.erase_tag(), self.layout.ty)
                }
                Immediate::ScalarPair(a, b) => {
                    // FIXME(oli-obk): at least print tuples and slices nicely
                    write!(f, "({:?}, {:?}): {}", a.erase_tag(), b.erase_tag(), self.layout.ty,)
                }
            }
        })
    }
}

impl<'tcx, Tag> ::std::ops::Deref for ImmTy<'tcx, Tag> {
    type Target = Immediate<Tag>;
    #[inline(always)]
    fn deref(&self) -> &Immediate<Tag> {
        &self.imm
    }
}

/// An `Operand` is the result of computing a `mir::Operand`. It can be immediate,
/// or still in memory. The latter is an optimization, to delay reading that chunk of
/// memory and to avoid having to store arbitrary-sized data here.
#[derive(Copy, Clone, Debug, PartialEq, Eq, HashStable, Hash)]
pub enum Operand<Tag = (), Id = AllocId> {
    Immediate(Immediate<Tag, Id>),
    Indirect(MemPlace<Tag, Id>),
}
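
// Illustrative note (editorial): evaluating `Copy(_1)` for a `u64` local that
// is stored as an immediate yields `Operand::Immediate(..)`, while an operand
// backed by an allocation, e.g. a large array, yields `Operand::Indirect(..)`
// describing where the data sits in memory.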

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct OpTy<'tcx, Tag = ()> {
    op: Operand<Tag>, // Keep this private; it helps enforce invariants.
    pub layout: TyAndLayout<'tcx>,
}

impl<'tcx, Tag> ::std::ops::Deref for OpTy<'tcx, Tag> {
    type Target = Operand<Tag>;
    #[inline(always)]
    fn deref(&self) -> &Operand<Tag> {
        &self.op
    }
}

impl<'tcx, Tag: Copy> From<MPlaceTy<'tcx, Tag>> for OpTy<'tcx, Tag> {
    #[inline(always)]
    fn from(mplace: MPlaceTy<'tcx, Tag>) -> Self {
        OpTy { op: Operand::Indirect(*mplace), layout: mplace.layout }
    }
}

impl<'tcx, Tag> From<ImmTy<'tcx, Tag>> for OpTy<'tcx, Tag> {
    #[inline(always)]
    fn from(val: ImmTy<'tcx, Tag>) -> Self {
        OpTy { op: Operand::Immediate(val.imm), layout: val.layout }
    }
}

impl<'tcx, Tag: Copy> ImmTy<'tcx, Tag> {
    #[inline]
    pub fn from_scalar(val: Scalar<Tag>, layout: TyAndLayout<'tcx>) -> Self {
        ImmTy { imm: val.into(), layout }
    }

    #[inline]
    pub fn from_immediate(imm: Immediate<Tag>, layout: TyAndLayout<'tcx>) -> Self {
        ImmTy { imm, layout }
    }

    #[inline]
    pub fn try_from_uint(i: impl Into<u128>, layout: TyAndLayout<'tcx>) -> Option<Self> {
        Some(Self::from_scalar(Scalar::try_from_uint(i, layout.size)?, layout))
    }
    #[inline]
    pub fn from_uint(i: impl Into<u128>, layout: TyAndLayout<'tcx>) -> Self {
        Self::from_scalar(Scalar::from_uint(i, layout.size), layout)
    }

    #[inline]
    pub fn try_from_int(i: impl Into<i128>, layout: TyAndLayout<'tcx>) -> Option<Self> {
        Some(Self::from_scalar(Scalar::try_from_int(i, layout.size)?, layout))
    }

    #[inline]
    pub fn from_int(i: impl Into<i128>, layout: TyAndLayout<'tcx>) -> Self {
        Self::from_scalar(Scalar::from_int(i, layout.size), layout)
    }
}
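
// Editorial sketch (assuming a `u8` layout obtained from `layout_of`): a
// constant like `42u8` can be built as `ImmTy::from_uint(42u8, u8_layout)`,
// which stores `Scalar::from_uint(42, Size::from_bytes(1))` next to its layout;
// `try_from_uint` returns `None` instead of panicking when the value does not
// fit in `layout.size`.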

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    /// Normalize `place.ptr` to a `Pointer` if this is a place and not a ZST.
    /// Can be helpful to avoid lots of `force_ptr` calls later, if this place is used a lot.
    #[inline]
    pub fn force_op_ptr(
        &self,
        op: OpTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        match op.try_as_mplace(self) {
            Ok(mplace) => Ok(self.force_mplace_ptr(mplace)?.into()),
            Err(imm) => Ok(imm.into()), // Nothing to cast/force
        }
    }

    /// Try reading an immediate in memory; this is particularly interesting for `ScalarPair`.
    /// Returns `None` if the layout does not permit loading this as a value.
    fn try_read_immediate_from_mplace(
        &self,
        mplace: MPlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, Option<ImmTy<'tcx, M::PointerTag>>> {
        if mplace.layout.is_unsized() {
            // Don't touch unsized
            return Ok(None);
        }

        let ptr = match self
            .check_mplace_access(mplace, None)
            .expect("places should be checked on creation")
        {
            Some(ptr) => ptr,
            None => {
                return Ok(Some(ImmTy {
                    // zero-sized type
                    imm: Scalar::zst().into(),
                    layout: mplace.layout,
                }));
            }
        };

        match mplace.layout.abi {
            Abi::Scalar(..) => {
                let scalar = self.memory.get_raw(ptr.alloc_id)?.read_scalar(
                    self,
                    ptr,
                    mplace.layout.size,
                )?;
                Ok(Some(ImmTy { imm: scalar.into(), layout: mplace.layout }))
            }
            Abi::ScalarPair(ref a, ref b) => {
                // We checked `ptr_align` above, so all fields will have the alignment they need.
                // We would anyway check against `ptr_align.restrict_for_offset(b_offset)`,
                // which `ptr.offset(b_offset)` cannot possibly fail to satisfy.
                let (a, b) = (&a.value, &b.value);
                let (a_size, b_size) = (a.size(self), b.size(self));
                let a_ptr = ptr;
                let b_offset = a_size.align_to(b.align(self).abi);
                assert!(b_offset.bytes() > 0); // we later use the offset to tell apart the fields
                let b_ptr = ptr.offset(b_offset, self)?;
                let a_val = self.memory.get_raw(ptr.alloc_id)?.read_scalar(self, a_ptr, a_size)?;
                let b_val = self.memory.get_raw(ptr.alloc_id)?.read_scalar(self, b_ptr, b_size)?;
                Ok(Some(ImmTy { imm: Immediate::ScalarPair(a_val, b_val), layout: mplace.layout }))
            }
            _ => Ok(None),
        }
    }
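
    // Editorial worked example for the `ScalarPair` arm above: for a pair
    // layout like `(u8, u32)` with `a_size = 1` and `b` 4-byte aligned,
    // `b_offset = a_size.align_to(b_align) = 4`, so `a` is read at `ptr` and
    // `b` at `ptr + 4`.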

    /// Try returning an immediate for the operand.
    /// If the layout does not permit loading this as an immediate, return where in memory
    /// we can find the data.
    /// Note that for a given layout, this operation will either always fail or always
    /// succeed! Whether it succeeds depends on whether the layout can be represented
    /// in an `Immediate`, not on which data is stored there currently.
    pub(crate) fn try_read_immediate(
        &self,
        src: OpTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, Result<ImmTy<'tcx, M::PointerTag>, MPlaceTy<'tcx, M::PointerTag>>> {
        Ok(match src.try_as_mplace(self) {
            Ok(mplace) => {
                if let Some(val) = self.try_read_immediate_from_mplace(mplace)? {
                    Ok(val)
                } else {
                    Err(mplace)
                }
            }
            Err(val) => Ok(val),
        })
    }

    /// Read an immediate from a place, asserting that that is possible with the given layout.
    #[inline(always)]
    pub fn read_immediate(
        &self,
        op: OpTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::PointerTag>> {
        if let Ok(imm) = self.try_read_immediate(op)? {
            Ok(imm)
        } else {
            bug!("primitive read failed for type: {:?}", op.layout.ty);
        }
    }

    /// Read a scalar from a place
    pub fn read_scalar(
        &self,
        op: OpTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, ScalarMaybeUndef<M::PointerTag>> {
        Ok(self.read_immediate(op)?.to_scalar_or_undef())
    }
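
    // Editorial usage sketch (hypothetical caller): a `bool` operand can be
    // read as `self.read_scalar(op)?.not_undef()?.to_bool()?`, i.e. through
    // `read_immediate`, rejecting undef bytes before interpreting the scalar.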

    // Turn the wide MPlace into a string (must already be dereferenced!)
    pub fn read_str(&self, mplace: MPlaceTy<'tcx, M::PointerTag>) -> InterpResult<'tcx, &str> {
        let len = mplace.len(self)?;
        let bytes = self.memory.read_bytes(mplace.ptr, Size::from_bytes(len))?;
        let str = ::std::str::from_utf8(bytes)
            .map_err(|err| err_ub_format!("this string is not valid UTF-8: {}", err))?;
        Ok(str)
    }

    /// Projection functions
    pub fn operand_field(
        &self,
        op: OpTy<'tcx, M::PointerTag>,
        field: usize,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        let base = match op.try_as_mplace(self) {
            Ok(mplace) => {
                // We can reuse the mplace field computation logic for indirect operands.
                let field = self.mplace_field(mplace, field)?;
                return Ok(field.into());
            }
            Err(value) => value,
        };

        let field_layout = op.layout.field(self, field)?;
        if field_layout.is_zst() {
            let immediate = Scalar::zst().into();
            return Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout });
        }
        let offset = op.layout.fields.offset(field);
        let immediate = match *base {
            // the field covers the entire type
            _ if offset.bytes() == 0 && field_layout.size == op.layout.size => *base,
            // extract fields from types with `ScalarPair` ABI
            Immediate::ScalarPair(a, b) => {
                let val = if offset.bytes() == 0 { a } else { b };
                Immediate::from(val)
            }
            Immediate::Scalar(val) => {
                bug!("field access on non-aggregate {:#?}, {:#?}", val, op.layout)
            }
        };
        Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout })
    }
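
    // Editorial note on the immediate path above: for a local held as
    // `ScalarPair(a, b)`, e.g. a `(u8, u32)`, field 0 sits at offset 0 and
    // yields `a`, while field 1 sits at a nonzero offset and yields `b`; any
    // other non-ZST field access on an immediate is a bug, since such types
    // are kept in memory.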

    pub fn operand_index(
        &self,
        op: OpTy<'tcx, M::PointerTag>,
        index: u64,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        if let Ok(index) = usize::try_from(index) {
            // We can just treat this as a field.
            self.operand_field(op, index)
        } else {
            // Indexing into a big array. This must be an mplace.
            let mplace = op.assert_mem_place(self);
            Ok(self.mplace_index(mplace, index)?.into())
        }
    }

    pub fn operand_downcast(
        &self,
        op: OpTy<'tcx, M::PointerTag>,
        variant: VariantIdx,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        // Downcasts only change the layout
        Ok(match op.try_as_mplace(self) {
            Ok(mplace) => self.mplace_downcast(mplace, variant)?.into(),
            Err(..) => {
                let layout = op.layout.for_variant(self, variant);
                OpTy { layout, ..op }
            }
        })
    }

    pub fn operand_projection(
        &self,
        base: OpTy<'tcx, M::PointerTag>,
        proj_elem: &mir::PlaceElem<'tcx>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        use rustc_middle::mir::ProjectionElem::*;
        Ok(match *proj_elem {
            Field(field, _) => self.operand_field(base, field.index())?,
            Downcast(_, variant) => self.operand_downcast(base, variant)?,
            Deref => self.deref_operand(base)?.into(),
            Subslice { .. } | ConstantIndex { .. } | Index(_) => {
                // The rest should only occur as mplace, we do not use Immediates for types
                // allowing such operations. This matches place_projection forcing an allocation.
                let mplace = base.assert_mem_place(self);
                self.mplace_projection(mplace, proj_elem)?.into()
            }
        })
    }

    /// This is used by [priroda](https://github.com/oli-obk/priroda) to get an OpTy from a local
    pub fn access_local(
        &self,
        frame: &super::Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>,
        local: mir::Local,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        assert_ne!(local, mir::RETURN_PLACE);
        let layout = self.layout_of_local(frame, local, layout)?;
        let op = if layout.is_zst() {
            // Do not read from ZSTs, they might not be initialized
            Operand::Immediate(Scalar::zst().into())
        } else {
            M::access_local(&self, frame, local)?
        };
        Ok(OpTy { op, layout })
    }

    /// Every place can be read from, so we can turn it into an operand.
    /// This will definitely return `Indirect` if the place is a `Ptr`, i.e., this
    /// will never actually read from memory.
    #[inline(always)]
    pub fn place_to_op(
        &self,
        place: PlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        let op = match *place {
            Place::Ptr(mplace) => Operand::Indirect(mplace),
            Place::Local { frame, local } => {
                *self.access_local(&self.stack()[frame], local, None)?
            }
        };
        Ok(OpTy { op, layout: place.layout })
    }

    // Evaluate a place with the goal of reading from it. This lets us sometimes
    // avoid allocations.
    pub fn eval_place_to_op(
        &self,
        place: mir::Place<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        let base_op = match place.local {
            mir::RETURN_PLACE => throw_ub!(ReadFromReturnPlace),
            local => {
                // Do not use the layout passed in as argument if the base we are looking at
                // here is not the entire place.
                let layout = if place.projection.is_empty() { layout } else { None };

                self.access_local(self.frame(), local, layout)?
            }
        };

        let op = place
            .projection
            .iter()
            .try_fold(base_op, |op, elem| self.operand_projection(op, elem))?;

        trace!("eval_place_to_op: got {:?}", *op);
        Ok(op)
    }
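
    // Editorial note: for a MIR place like `(*_1).0`, the fold above starts
    // from the local `_1` and applies `Deref`, then `Field(0, _)`, each step
    // going through `operand_projection`.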

    /// Evaluate the operand, returning a place where you can then find the data.
    /// If you already know the layout, you can save two table lookups
    /// by passing it in here.
    pub fn eval_operand(
        &self,
        mir_op: &mir::Operand<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        use rustc_middle::mir::Operand::*;
        let op = match *mir_op {
            // FIXME: do some more logic on `move` to invalidate the old location
            Copy(place) | Move(place) => self.eval_place_to_op(place, layout)?,

            Constant(ref constant) => {
                let val =
                    self.subst_from_current_frame_and_normalize_erasing_regions(constant.literal);
                self.eval_const_to_op(val, layout)?
            }
        };
        trace!("{:?}: {:?}", mir_op, *op);
        Ok(op)
    }

    /// Evaluate a bunch of operands at once
    pub(super) fn eval_operands(
        &self,
        ops: &[mir::Operand<'tcx>],
    ) -> InterpResult<'tcx, Vec<OpTy<'tcx, M::PointerTag>>> {
        ops.iter().map(|op| self.eval_operand(op, None)).collect()
    }

    // Used when the miri-engine runs into a constant and for extracting information from constants
    // in patterns via the `const_eval` module
    /// The `val` and `layout` are assumed to already be in our interpreter
    /// "universe" (param_env).
    crate fn eval_const_to_op(
        &self,
        val: &ty::Const<'tcx>,
        layout: Option<TyAndLayout<'tcx>>,
    ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> {
        let tag_scalar = |scalar| match scalar {
            Scalar::Ptr(ptr) => Scalar::Ptr(self.tag_global_base_pointer(ptr)),
            Scalar::Raw { data, size } => Scalar::Raw { data, size },
        };
        // Early-return cases.
        let val_val = match val.val {
            ty::ConstKind::Param(_) => throw_inval!(TooGeneric),
            ty::ConstKind::Error => throw_inval!(TypeckError(ErrorReported)),
            ty::ConstKind::Unevaluated(def_id, substs, promoted) => {
                let instance = self.resolve(def_id, substs)?;
                // We use `const_eval` here and `const_eval_raw` elsewhere in mir interpretation.
                // The reason we use `const_eval_raw` everywhere else is to prevent cycles during
                // validation, because validation automatically reads through any references, thus
                // potentially requiring the current static to be evaluated again. This is not a
                // problem here, because we are building an operand which means an actual read is
                // happening.
                //
                // The machine callback `adjust_global_const` below is guaranteed to
                // be called for all constants because `const_eval` calls
                // `eval_const_to_op` recursively.
                return Ok(self.const_eval(GlobalId { instance, promoted }, val.ty)?);
            }
            ty::ConstKind::Infer(..)
            | ty::ConstKind::Bound(..)
            | ty::ConstKind::Placeholder(..) => {
                bug!("eval_const_to_op: Unexpected ConstKind {:?}", val)
            }
            ty::ConstKind::Value(val_val) => val_val,
        };
        // This call allows the machine to create fresh allocation ids for
        // thread-local statics (see the `adjust_global_const` function
        // documentation).
        let val_val = M::adjust_global_const(self, val_val)?;
        // Other cases need layout.
        let layout = from_known_layout(self.tcx, layout, || self.layout_of(val.ty))?;
        let op = match val_val {
            ConstValue::ByRef { alloc, offset } => {
                let id = self.tcx.alloc_map.lock().create_memory_alloc(alloc);
                // We rely on mutability being set correctly in that allocation to prevent writes
                // where none should happen.
                let ptr = self.tag_global_base_pointer(Pointer::new(id, offset));
                Operand::Indirect(MemPlace::from_ptr(ptr, layout.align.abi))
            }
            ConstValue::Scalar(x) => Operand::Immediate(tag_scalar(x).into()),
            ConstValue::Slice { data, start, end } => {
                // We rely on mutability being set correctly in `data` to prevent writes
                // where none should happen.
                let ptr = Pointer::new(
                    self.tcx.alloc_map.lock().create_memory_alloc(data),
                    Size::from_bytes(start), // offset: `start`
                );
                Operand::Immediate(Immediate::new_slice(
                    self.tag_global_base_pointer(ptr).into(),
                    u64::try_from(end.checked_sub(start).unwrap()).unwrap(), // len: `end - start`
                    self,
                ))
            }
        };
        Ok(OpTy { op, layout })
    }

    /// Read discriminant, return the runtime value as well as the variant index.
    pub fn read_discriminant(
        &self,
        rval: OpTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx, (u128, VariantIdx)> {
        trace!("read_discriminant_value {:#?}", rval.layout);

        let (discr_layout, discr_kind, discr_index) = match rval.layout.variants {
            Variants::Single { index } => {
                let discr_val = rval
                    .layout
                    .ty
                    .discriminant_for_variant(*self.tcx, index)
                    .map_or(u128::from(index.as_u32()), |discr| discr.val);
                return Ok((discr_val, index));
            }
            Variants::Multiple { discr: ref discr_layout, ref discr_kind, discr_index, .. } => {
                (discr_layout, discr_kind, discr_index)
            }
        };

        // read raw discriminant value
        let discr_op = self.operand_field(rval, discr_index)?;
        let discr_val = self.read_immediate(discr_op)?;
        let raw_discr = discr_val.to_scalar_or_undef();
        trace!("discr value: {:?}", raw_discr);
        // post-process
        Ok(match *discr_kind {
            DiscriminantKind::Tag => {
                let bits_discr = raw_discr
                    .not_undef()
                    .and_then(|raw_discr| self.force_bits(raw_discr, discr_val.layout.size))
                    .map_err(|_| err_ub!(InvalidDiscriminant(raw_discr.erase_tag())))?;
                let real_discr = if discr_val.layout.abi.is_signed() {
                    // going from layout tag type to typeck discriminant type
                    // requires first sign extending with the discriminant layout
                    let sexted = sign_extend(bits_discr, discr_val.layout.size);
                    // and then zeroing with the typeck discriminant type
                    let discr_ty = rval
                        .layout
                        .ty
                        .ty_adt_def()
                        .expect("tagged layout corresponds to adt")
                        .repr
                        .discr_type();
                    let size = Integer::from_attr(self, discr_ty).size();
                    truncate(sexted, size)
                } else {
                    bits_discr
                };
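
                // Editorial worked example: a discriminant of -1 stored in a
                // one-byte tag reads back as bits 0xff; sign-extending gives an
                // all-ones value, and truncating to the wider typeck
                // discriminant type (e.g. an 8-byte `isize`) yields -1 in that
                // type, as intended.
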
                // Make sure we catch invalid discriminants
                let index = match rval.layout.ty.kind {
                    ty::Adt(adt, _) => {
                        adt.discriminants(self.tcx.tcx).find(|(_, var)| var.val == real_discr)
                    }
                    ty::Generator(def_id, substs, _) => {
                        let substs = substs.as_generator();
                        substs
                            .discriminants(def_id, self.tcx.tcx)
                            .find(|(_, var)| var.val == real_discr)
                    }
                    _ => bug!("tagged layout for non-adt non-generator"),
                }
                .ok_or_else(|| err_ub!(InvalidDiscriminant(raw_discr.erase_tag())))?;
                (real_discr, index.0)
            }
            DiscriminantKind::Niche { dataful_variant, ref niche_variants, niche_start } => {
                let variants_start = niche_variants.start().as_u32();
                let variants_end = niche_variants.end().as_u32();
                let raw_discr = raw_discr
                    .not_undef()
                    .map_err(|_| err_ub!(InvalidDiscriminant(ScalarMaybeUndef::Undef)))?;
                match raw_discr.to_bits_or_ptr(discr_val.layout.size, self) {
                    Err(ptr) => {
                        // The niche must be just 0 (which an inbounds pointer value never is)
                        let ptr_valid = niche_start == 0
                            && variants_start == variants_end
                            && !self.memory.ptr_may_be_null(ptr);
                        if !ptr_valid {
                            throw_ub!(InvalidDiscriminant(raw_discr.erase_tag().into()))
                        }
                        (u128::from(dataful_variant.as_u32()), dataful_variant)
                    }
                    Ok(raw_discr) => {
                        // We need to use machine arithmetic to get the relative variant idx:
                        // variant_index_relative = discr_val - niche_start_val
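                        //
                        // Editorial worked example: for `Option<NonZeroU8>` the
                        // niche encodes `None` (its only niche variant) at
                        // `niche_start = 0`, with `Some` as the dataful
                        // variant; a raw tag of 0 gives relative index
                        // 0 - 0 = 0, which is in range, so we pick `None`, and
                        // any nonzero tag is out of range and falls through to
                        // the dataful `Some`.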
                        let discr_layout =
                            self.layout_of(discr_layout.value.to_int_ty(*self.tcx))?;
                        let discr_val = ImmTy::from_uint(raw_discr, discr_layout);
                        let niche_start_val = ImmTy::from_uint(niche_start, discr_layout);
                        let variant_index_relative_val =
                            self.binary_op(mir::BinOp::Sub, discr_val, niche_start_val)?;
                        let variant_index_relative = variant_index_relative_val
                            .to_scalar()?
                            .assert_bits(discr_val.layout.size);
                        // Check if this is in the range that indicates an actual discriminant.
                        if variant_index_relative <= u128::from(variants_end - variants_start) {
                            let variant_index_relative = u32::try_from(variant_index_relative)
                                .expect("we checked that this fits into a u32");
                            // Then computing the absolute variant idx should not overflow any more.
                            let variant_index = variants_start
                                .checked_add(variant_index_relative)
                                .expect("overflow computing absolute variant idx");
                            let variants_len = rval
                                .layout
                                .ty
                                .ty_adt_def()
                                .expect("tagged layout for non adt")
                                .variants
                                .len();
                            assert!(usize::try_from(variant_index).unwrap() < variants_len);
                            (u128::from(variant_index), VariantIdx::from_u32(variant_index))
                        } else {
                            (u128::from(dataful_variant.as_u32()), dataful_variant)
                        }
                    }
                }
            }
        })
    }
}