]> git.proxmox.com Git - rustc.git/blob - src/librustc/mir/interpret/value.rs
New upstream version 1.41.1+dfsg1
[rustc.git] / src / librustc / mir / interpret / value.rs
1 use std::fmt;
2 use rustc_macros::HashStable;
3 use rustc_apfloat::{Float, ieee::{Double, Single}};
4
5 use crate::ty::{Ty, layout::{HasDataLayout, Size}};
6
7 use super::{InterpResult, Pointer, PointerArithmetic, Allocation, AllocId, sign_extend, truncate};
8
/// Represents the result of a raw const operation, pre-validation.
#[derive(Clone, HashStable)]
pub struct RawConst<'tcx> {
    // The value lives here, at offset 0, and that allocation definitely is an `AllocKind::Memory`
    // (so you can use `AllocMap::unwrap_memory`).
    pub alloc_id: AllocId,
    /// The type of the constant stored in the allocation behind `alloc_id`.
    pub ty: Ty<'tcx>,
}
17
/// Represents a constant value in Rust. `Scalar` and `Slice` are optimizations for
/// array length computations, enum discriminants and the pattern matching logic.
#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord,
         RustcEncodable, RustcDecodable, Hash, HashStable)]
pub enum ConstValue<'tcx> {
    /// Used only for types with `layout::abi::Scalar` ABI and ZSTs.
    ///
    /// Not using the enum `Value` to encode that this must not be `Undef`.
    Scalar(Scalar),

    /// Used only for `&[u8]` and `&str`
    Slice {
        /// The allocation the slice points into.
        data: &'tcx Allocation,
        /// Start of the slice, as a byte offset into `data` (see `get_slice_bytes`).
        start: usize,
        /// End (exclusive) of the slice, as a byte offset into `data`.
        end: usize,
    },

    /// A value not represented/representable by `Scalar` or `Slice`
    ByRef {
        /// The backing memory of the value, may contain more memory than needed for just the value
        /// in order to share `Allocation`s between values
        alloc: &'tcx Allocation,
        /// Offset into `alloc`
        offset: Size,
    },
}
44
// Guard against accidental size growth: `ConstValue` is copied around a lot.
// (Only checked on x86_64, where the layout of the reference/usize fields is fixed.)
#[cfg(target_arch = "x86_64")]
static_assert_size!(ConstValue<'_>, 32);
47
48 impl<'tcx> ConstValue<'tcx> {
49 #[inline]
50 pub fn try_to_scalar(&self) -> Option<Scalar> {
51 match *self {
52 ConstValue::ByRef { .. } |
53 ConstValue::Slice { .. } => None,
54 ConstValue::Scalar(val) => Some(val),
55 }
56 }
57 }
58
/// A `Scalar` represents an immediate, primitive value existing outside of a
/// `memory::Allocation`. It is in many ways like a small chunk of a `Allocation`, up to 8 bytes in
/// size. Like a range of bytes in an `Allocation`, a `Scalar` can either represent the raw bytes
/// of a simple value or a pointer into another `Allocation`
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd,
         RustcEncodable, RustcDecodable, Hash, HashStable)]
pub enum Scalar<Tag = (), Id = AllocId> {
    /// The raw bytes of a simple value.
    Raw {
        /// The first `size` bytes of `data` are the value.
        /// Do not try to read less or more bytes than that. The remaining bytes must be 0.
        data: u128,
        /// Number of meaningful bytes in `data`; `0` marks a ZST.
        size: u8,
    },

    /// A pointer into an `Allocation`. An `Allocation` in the `memory` module has a list of
    /// relocations, but a `Scalar` is only large enough to contain one, so we just represent the
    /// relocation and its associated offset together as a `Pointer` here.
    Ptr(Pointer<Tag, Id>),
}
79
// Guard against accidental size growth of the (untagged) `Scalar`.
#[cfg(target_arch = "x86_64")]
static_assert_size!(Scalar, 24);
82
83 impl<Tag: fmt::Debug, Id: fmt::Debug> fmt::Debug for Scalar<Tag, Id> {
84 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
85 match self {
86 Scalar::Ptr(ptr) =>
87 write!(f, "{:?}", ptr),
88 &Scalar::Raw { data, size } => {
89 Scalar::check_data(data, size);
90 if size == 0 {
91 write!(f, "<ZST>")
92 } else {
93 // Format as hex number wide enough to fit any value of the given `size`.
94 // So data=20, size=1 will be "0x14", but with size=4 it'll be "0x00000014".
95 write!(f, "0x{:>0width$x}", data, width=(size*2) as usize)
96 }
97 }
98 }
99 }
100 }
101
102 impl<Tag> fmt::Display for Scalar<Tag> {
103 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
104 match self {
105 Scalar::Ptr(_) => write!(f, "a pointer"),
106 Scalar::Raw { data, .. } => write!(f, "{}", data),
107 }
108 }
109 }
110
111 impl<Tag> From<Single> for Scalar<Tag> {
112 #[inline(always)]
113 fn from(f: Single) -> Self {
114 Scalar::from_f32(f)
115 }
116 }
117
118 impl<Tag> From<Double> for Scalar<Tag> {
119 #[inline(always)]
120 fn from(f: Double) -> Self {
121 Scalar::from_f64(f)
122 }
123 }
124
125 impl Scalar<()> {
126 #[inline(always)]
127 fn check_data(data: u128, size: u8) {
128 debug_assert_eq!(truncate(data, Size::from_bytes(size as u64)), data,
129 "Scalar value {:#x} exceeds size of {} bytes", data, size);
130 }
131
132 /// Tag this scalar with `new_tag` if it is a pointer, leave it unchanged otherwise.
133 ///
134 /// Used by `MemPlace::replace_tag`.
135 #[inline]
136 pub fn with_tag<Tag>(self, new_tag: Tag) -> Scalar<Tag> {
137 match self {
138 Scalar::Ptr(ptr) => Scalar::Ptr(ptr.with_tag(new_tag)),
139 Scalar::Raw { data, size } => Scalar::Raw { data, size },
140 }
141 }
142 }
143
144 impl<'tcx, Tag> Scalar<Tag> {
145 /// Erase the tag from the scalar, if any.
146 ///
147 /// Used by error reporting code to avoid having the error type depend on `Tag`.
148 #[inline]
149 pub fn erase_tag(self) -> Scalar {
150 match self {
151 Scalar::Ptr(ptr) => Scalar::Ptr(ptr.erase_tag()),
152 Scalar::Raw { data, size } => Scalar::Raw { data, size },
153 }
154 }
155
156 #[inline]
157 pub fn ptr_null(cx: &impl HasDataLayout) -> Self {
158 Scalar::Raw {
159 data: 0,
160 size: cx.data_layout().pointer_size.bytes() as u8,
161 }
162 }
163
164 #[inline]
165 pub fn zst() -> Self {
166 Scalar::Raw { data: 0, size: 0 }
167 }
168
169 #[inline]
170 pub fn ptr_offset(self, i: Size, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
171 let dl = cx.data_layout();
172 match self {
173 Scalar::Raw { data, size } => {
174 assert_eq!(size as u64, dl.pointer_size.bytes());
175 Ok(Scalar::Raw {
176 data: dl.offset(data as u64, i.bytes())? as u128,
177 size,
178 })
179 }
180 Scalar::Ptr(ptr) => ptr.offset(i, dl).map(Scalar::Ptr),
181 }
182 }
183
184 #[inline]
185 pub fn ptr_wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
186 let dl = cx.data_layout();
187 match self {
188 Scalar::Raw { data, size } => {
189 assert_eq!(size as u64, dl.pointer_size.bytes());
190 Scalar::Raw {
191 data: dl.overflowing_offset(data as u64, i.bytes()).0 as u128,
192 size,
193 }
194 }
195 Scalar::Ptr(ptr) => Scalar::Ptr(ptr.wrapping_offset(i, dl)),
196 }
197 }
198
199 #[inline]
200 pub fn ptr_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
201 let dl = cx.data_layout();
202 match self {
203 Scalar::Raw { data, size } => {
204 assert_eq!(size as u64, dl.pointer_size().bytes());
205 Ok(Scalar::Raw {
206 data: dl.signed_offset(data as u64, i)? as u128,
207 size,
208 })
209 }
210 Scalar::Ptr(ptr) => ptr.signed_offset(i, dl).map(Scalar::Ptr),
211 }
212 }
213
214 #[inline]
215 pub fn ptr_wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
216 let dl = cx.data_layout();
217 match self {
218 Scalar::Raw { data, size } => {
219 assert_eq!(size as u64, dl.pointer_size.bytes());
220 Scalar::Raw {
221 data: dl.overflowing_signed_offset(data as u64, i128::from(i)).0 as u128,
222 size,
223 }
224 }
225 Scalar::Ptr(ptr) => Scalar::Ptr(ptr.wrapping_signed_offset(i, dl)),
226 }
227 }
228
229 #[inline]
230 pub fn from_bool(b: bool) -> Self {
231 Scalar::Raw { data: b as u128, size: 1 }
232 }
233
234 #[inline]
235 pub fn from_char(c: char) -> Self {
236 Scalar::Raw { data: c as u128, size: 4 }
237 }
238
239 #[inline]
240 pub fn from_uint(i: impl Into<u128>, size: Size) -> Self {
241 let i = i.into();
242 assert_eq!(
243 truncate(i, size), i,
244 "Unsigned value {:#x} does not fit in {} bits", i, size.bits()
245 );
246 Scalar::Raw { data: i, size: size.bytes() as u8 }
247 }
248
249 #[inline]
250 pub fn from_u8(i: u8) -> Self {
251 Scalar::Raw { data: i as u128, size: 1 }
252 }
253
254 #[inline]
255 pub fn from_u16(i: u16) -> Self {
256 Scalar::Raw { data: i as u128, size: 2 }
257 }
258
259 #[inline]
260 pub fn from_u32(i: u32) -> Self {
261 Scalar::Raw { data: i as u128, size: 4 }
262 }
263
264 #[inline]
265 pub fn from_u64(i: u64) -> Self {
266 Scalar::Raw { data: i as u128, size: 8 }
267 }
268
269 #[inline]
270 pub fn from_int(i: impl Into<i128>, size: Size) -> Self {
271 let i = i.into();
272 // `into` performed sign extension, we have to truncate
273 let truncated = truncate(i as u128, size);
274 assert_eq!(
275 sign_extend(truncated, size) as i128, i,
276 "Signed value {:#x} does not fit in {} bits", i, size.bits()
277 );
278 Scalar::Raw { data: truncated, size: size.bytes() as u8 }
279 }
280
281 #[inline]
282 pub fn from_f32(f: Single) -> Self {
283 // We trust apfloat to give us properly truncated data.
284 Scalar::Raw { data: f.to_bits(), size: 4 }
285 }
286
287 #[inline]
288 pub fn from_f64(f: Double) -> Self {
289 // We trust apfloat to give us properly truncated data.
290 Scalar::Raw { data: f.to_bits(), size: 8 }
291 }
292
293 /// This is very rarely the method you want! You should dispatch on the type
294 /// and use `force_bits`/`assert_bits`/`force_ptr`/`assert_ptr`.
295 /// This method only exists for the benefit of low-level memory operations
296 /// as well as the implementation of the `force_*` methods.
297 #[inline]
298 pub fn to_bits_or_ptr(
299 self,
300 target_size: Size,
301 cx: &impl HasDataLayout,
302 ) -> Result<u128, Pointer<Tag>> {
303 match self {
304 Scalar::Raw { data, size } => {
305 assert_eq!(target_size.bytes(), size as u64);
306 assert_ne!(size, 0, "you should never look at the bits of a ZST");
307 Scalar::check_data(data, size);
308 Ok(data)
309 }
310 Scalar::Ptr(ptr) => {
311 assert_eq!(target_size, cx.data_layout().pointer_size);
312 Err(ptr)
313 }
314 }
315 }
316
    /// Asserts that `data`/`size` form a valid non-ZST scalar of width `target_size`:
    /// the sizes must agree, `size` must be non-zero, and (in debug builds)
    /// all bits beyond the first `size` bytes of `data` must be zero.
    #[inline(always)]
    pub fn check_raw(data: u128, size: u8, target_size: Size) {
        assert_eq!(target_size.bytes(), size as u64);
        assert_ne!(size, 0, "you should never look at the bits of a ZST");
        Scalar::check_data(data, size);
    }
323
324 /// Do not call this method! Use either `assert_bits` or `force_bits`.
325 #[inline]
326 pub fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> {
327 match self {
328 Scalar::Raw { data, size } => {
329 Self::check_raw(data, size, target_size);
330 Ok(data)
331 }
332 Scalar::Ptr(_) => throw_unsup!(ReadPointerAsBytes),
333 }
334 }
335
    /// Like `to_bits`, but panics instead of erroring when this is a pointer.
    #[inline(always)]
    pub fn assert_bits(self, target_size: Size) -> u128 {
        self.to_bits(target_size).expect("expected Raw bits but got a Pointer")
    }
340
341 /// Do not call this method! Use either `assert_ptr` or `force_ptr`.
342 #[inline]
343 pub fn to_ptr(self) -> InterpResult<'tcx, Pointer<Tag>> {
344 match self {
345 Scalar::Raw { data: 0, .. } => throw_unsup!(InvalidNullPointerUsage),
346 Scalar::Raw { .. } => throw_unsup!(ReadBytesAsPointer),
347 Scalar::Ptr(p) => Ok(p),
348 }
349 }
350
    /// Like `to_ptr`, but panics instead of erroring when this is raw bits.
    #[inline(always)]
    pub fn assert_ptr(self) -> Pointer<Tag> {
        self.to_ptr().expect("expected a Pointer but got Raw bits")
    }
355
356 /// Do not call this method! Dispatch based on the type instead.
357 #[inline]
358 pub fn is_bits(self) -> bool {
359 match self {
360 Scalar::Raw { .. } => true,
361 _ => false,
362 }
363 }
364
365 /// Do not call this method! Dispatch based on the type instead.
366 #[inline]
367 pub fn is_ptr(self) -> bool {
368 match self {
369 Scalar::Ptr(_) => true,
370 _ => false,
371 }
372 }
373
    /// Interprets this scalar as a `bool`: must be a 1-byte value of exactly 0 or 1.
    /// Anything else (wrong size, out-of-range bits, or a pointer) is an error.
    pub fn to_bool(self) -> InterpResult<'tcx, bool> {
        match self {
            Scalar::Raw { data: 0, size: 1 } => Ok(false),
            Scalar::Raw { data: 1, size: 1 } => Ok(true),
            _ => throw_unsup!(InvalidBool),
        }
    }
381
382 pub fn to_char(self) -> InterpResult<'tcx, char> {
383 let val = self.to_u32()?;
384 match ::std::char::from_u32(val) {
385 Some(c) => Ok(c),
386 None => throw_unsup!(InvalidChar(val as u128)),
387 }
388 }
389
390 pub fn to_u8(self) -> InterpResult<'static, u8> {
391 let sz = Size::from_bits(8);
392 let b = self.to_bits(sz)?;
393 Ok(b as u8)
394 }
395
396 pub fn to_u32(self) -> InterpResult<'static, u32> {
397 let sz = Size::from_bits(32);
398 let b = self.to_bits(sz)?;
399 Ok(b as u32)
400 }
401
402 pub fn to_u64(self) -> InterpResult<'static, u64> {
403 let sz = Size::from_bits(64);
404 let b = self.to_bits(sz)?;
405 Ok(b as u64)
406 }
407
408 pub fn to_machine_usize(self, cx: &impl HasDataLayout) -> InterpResult<'static, u64> {
409 let b = self.to_bits(cx.data_layout().pointer_size)?;
410 Ok(b as u64)
411 }
412
413 pub fn to_i8(self) -> InterpResult<'static, i8> {
414 let sz = Size::from_bits(8);
415 let b = self.to_bits(sz)?;
416 let b = sign_extend(b, sz) as i128;
417 Ok(b as i8)
418 }
419
420 pub fn to_i32(self) -> InterpResult<'static, i32> {
421 let sz = Size::from_bits(32);
422 let b = self.to_bits(sz)?;
423 let b = sign_extend(b, sz) as i128;
424 Ok(b as i32)
425 }
426
427 pub fn to_i64(self) -> InterpResult<'static, i64> {
428 let sz = Size::from_bits(64);
429 let b = self.to_bits(sz)?;
430 let b = sign_extend(b, sz) as i128;
431 Ok(b as i64)
432 }
433
434 pub fn to_machine_isize(self, cx: &impl HasDataLayout) -> InterpResult<'static, i64> {
435 let sz = cx.data_layout().pointer_size;
436 let b = self.to_bits(sz)?;
437 let b = sign_extend(b, sz) as i128;
438 Ok(b as i64)
439 }
440
441 #[inline]
442 pub fn to_f32(self) -> InterpResult<'static, Single> {
443 // Going through `u32` to check size and truncation.
444 Ok(Single::from_bits(self.to_u32()? as u128))
445 }
446
447 #[inline]
448 pub fn to_f64(self) -> InterpResult<'static, Double> {
449 // Going through `u64` to check size and truncation.
450 Ok(Double::from_bits(self.to_u64()? as u128))
451 }
452 }
453
454 impl<Tag> From<Pointer<Tag>> for Scalar<Tag> {
455 #[inline(always)]
456 fn from(ptr: Pointer<Tag>) -> Self {
457 Scalar::Ptr(ptr)
458 }
459 }
460
/// A `Scalar` that may instead be uninitialized ("undef") memory.
#[derive(Clone, Copy, Eq, PartialEq, RustcEncodable, RustcDecodable, HashStable, Hash)]
pub enum ScalarMaybeUndef<Tag = (), Id = AllocId> {
    /// An initialized scalar value.
    Scalar(Scalar<Tag, Id>),
    /// Uninitialized bytes.
    Undef,
}
466
467 impl<Tag> From<Scalar<Tag>> for ScalarMaybeUndef<Tag> {
468 #[inline(always)]
469 fn from(s: Scalar<Tag>) -> Self {
470 ScalarMaybeUndef::Scalar(s)
471 }
472 }
473
474 impl<Tag> From<Pointer<Tag>> for ScalarMaybeUndef<Tag> {
475 #[inline(always)]
476 fn from(s: Pointer<Tag>) -> Self {
477 ScalarMaybeUndef::Scalar(s.into())
478 }
479 }
480
481 impl<Tag: fmt::Debug, Id: fmt::Debug> fmt::Debug for ScalarMaybeUndef<Tag, Id> {
482 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
483 match self {
484 ScalarMaybeUndef::Undef => write!(f, "Undef"),
485 ScalarMaybeUndef::Scalar(s) => write!(f, "{:?}", s),
486 }
487 }
488 }
489
490 impl<Tag> fmt::Display for ScalarMaybeUndef<Tag> {
491 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
492 match self {
493 ScalarMaybeUndef::Undef => write!(f, "uninitialized bytes"),
494 ScalarMaybeUndef::Scalar(s) => write!(f, "{}", s),
495 }
496 }
497 }
498
499 impl<'tcx, Tag> ScalarMaybeUndef<Tag> {
500 /// Erase the tag from the scalar, if any.
501 ///
502 /// Used by error reporting code to avoid having the error type depend on `Tag`.
503 #[inline]
504 pub fn erase_tag(self) -> ScalarMaybeUndef
505 {
506 match self {
507 ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.erase_tag()),
508 ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
509 }
510 }
511
512 #[inline]
513 pub fn not_undef(self) -> InterpResult<'static, Scalar<Tag>> {
514 match self {
515 ScalarMaybeUndef::Scalar(scalar) => Ok(scalar),
516 ScalarMaybeUndef::Undef => throw_unsup!(ReadUndefBytes(Size::ZERO)),
517 }
518 }
519
520 /// Do not call this method! Use either `assert_ptr` or `force_ptr`.
521 #[inline(always)]
522 pub fn to_ptr(self) -> InterpResult<'tcx, Pointer<Tag>> {
523 self.not_undef()?.to_ptr()
524 }
525
526 /// Do not call this method! Use either `assert_bits` or `force_bits`.
527 #[inline(always)]
528 pub fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> {
529 self.not_undef()?.to_bits(target_size)
530 }
531
532 #[inline(always)]
533 pub fn to_bool(self) -> InterpResult<'tcx, bool> {
534 self.not_undef()?.to_bool()
535 }
536
537 #[inline(always)]
538 pub fn to_char(self) -> InterpResult<'tcx, char> {
539 self.not_undef()?.to_char()
540 }
541
542 #[inline(always)]
543 pub fn to_f32(self) -> InterpResult<'tcx, Single> {
544 self.not_undef()?.to_f32()
545 }
546
547 #[inline(always)]
548 pub fn to_f64(self) -> InterpResult<'tcx, Double> {
549 self.not_undef()?.to_f64()
550 }
551
552 #[inline(always)]
553 pub fn to_u8(self) -> InterpResult<'tcx, u8> {
554 self.not_undef()?.to_u8()
555 }
556
557 #[inline(always)]
558 pub fn to_u32(self) -> InterpResult<'tcx, u32> {
559 self.not_undef()?.to_u32()
560 }
561
562 #[inline(always)]
563 pub fn to_u64(self) -> InterpResult<'tcx, u64> {
564 self.not_undef()?.to_u64()
565 }
566
567 #[inline(always)]
568 pub fn to_machine_usize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> {
569 self.not_undef()?.to_machine_usize(cx)
570 }
571
572 #[inline(always)]
573 pub fn to_i8(self) -> InterpResult<'tcx, i8> {
574 self.not_undef()?.to_i8()
575 }
576
577 #[inline(always)]
578 pub fn to_i32(self) -> InterpResult<'tcx, i32> {
579 self.not_undef()?.to_i32()
580 }
581
582 #[inline(always)]
583 pub fn to_i64(self) -> InterpResult<'tcx, i64> {
584 self.not_undef()?.to_i64()
585 }
586
587 #[inline(always)]
588 pub fn to_machine_isize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, i64> {
589 self.not_undef()?.to_machine_isize(cx)
590 }
591 }
592
593 /// Gets the bytes of a constant slice value.
594 pub fn get_slice_bytes<'tcx>(cx: &impl HasDataLayout, val: ConstValue<'tcx>) -> &'tcx [u8] {
595 if let ConstValue::Slice { data, start, end } = val {
596 let len = end - start;
597 data.get_bytes(
598 cx,
599 // invent a pointer, only the offset is relevant anyway
600 Pointer::new(AllocId(0), Size::from_bytes(start as u64)),
601 Size::from_bytes(len as u64),
602 ).unwrap_or_else(|err| bug!("const slice is invalid: {:?}", err))
603 } else {
604 bug!("expected const slice, but found another const value");
605 }
606 }