]> git.proxmox.com Git - rustc.git/blob - compiler/rustc_middle/src/mir/interpret/value.rs
New upstream version 1.49.0~beta.4+dfsg1
[rustc.git] / compiler / rustc_middle / src / mir / interpret / value.rs
1 use std::convert::TryFrom;
2 use std::fmt;
3
4 use rustc_apfloat::{
5 ieee::{Double, Single},
6 Float,
7 };
8 use rustc_macros::HashStable;
9 use rustc_target::abi::{HasDataLayout, Size, TargetDataLayout};
10
11 use crate::ty::{ParamEnv, ScalarInt, Ty, TyCtxt};
12
13 use super::{AllocId, Allocation, InterpResult, Pointer, PointerArithmetic};
14
/// Represents the result of const evaluation via the `eval_to_allocation` query.
#[derive(Clone, HashStable, TyEncodable, TyDecodable)]
pub struct ConstAlloc<'tcx> {
    // The value lives here, at offset 0, and that allocation definitely is an `AllocKind::Memory`
    // (so you can use `AllocMap::unwrap_memory`).
    pub alloc_id: AllocId,
    // The type of the value stored in the allocation referenced by `alloc_id`.
    pub ty: Ty<'tcx>,
}
23
/// Represents a constant value in Rust. `Scalar` and `Slice` are optimizations for
/// array length computations, enum discriminants and the pattern matching logic.
#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable)]
pub enum ConstValue<'tcx> {
    /// Used only for types with `layout::abi::Scalar` ABI and ZSTs.
    ///
    /// Not using the enum `Value` to encode that this must not be `Uninit`.
    Scalar(Scalar),

    /// Used only for `&[u8]` and `&str`.
    /// `start` and `end` are byte offsets into `data` delimiting the slice contents
    /// (see `get_slice_bytes` below, which reads `end - start` bytes at offset `start`).
    Slice { data: &'tcx Allocation, start: usize, end: usize },

    /// A value not represented/representable by `Scalar` or `Slice`
    ByRef {
        /// The backing memory of the value, may contain more memory than needed for just the value
        /// in order to share `Allocation`s between values
        alloc: &'tcx Allocation,
        /// Offset into `alloc`
        offset: Size,
    },
}

// `ConstValue` is embedded in many other types; make sure it does not accidentally grow.
#[cfg(target_arch = "x86_64")]
static_assert_size!(ConstValue<'_>, 32);
49
50 impl<'tcx> ConstValue<'tcx> {
51 #[inline]
52 pub fn try_to_scalar(&self) -> Option<Scalar> {
53 match *self {
54 ConstValue::ByRef { .. } | ConstValue::Slice { .. } => None,
55 ConstValue::Scalar(val) => Some(val),
56 }
57 }
58
59 pub fn try_to_bits(&self, size: Size) -> Option<u128> {
60 self.try_to_scalar()?.to_bits(size).ok()
61 }
62
63 pub fn try_to_bool(&self) -> Option<bool> {
64 match self.try_to_bits(Size::from_bytes(1))? {
65 0 => Some(false),
66 1 => Some(true),
67 _ => None,
68 }
69 }
70
71 pub fn try_to_machine_usize(&self, tcx: TyCtxt<'tcx>) -> Option<u64> {
72 Some(self.try_to_bits(tcx.data_layout.pointer_size)? as u64)
73 }
74
75 pub fn try_to_bits_for_ty(
76 &self,
77 tcx: TyCtxt<'tcx>,
78 param_env: ParamEnv<'tcx>,
79 ty: Ty<'tcx>,
80 ) -> Option<u128> {
81 let size = tcx.layout_of(param_env.with_reveal_all_normalized(tcx).and(ty)).ok()?.size;
82 self.try_to_bits(size)
83 }
84
85 pub fn from_bool(b: bool) -> Self {
86 ConstValue::Scalar(Scalar::from_bool(b))
87 }
88
89 pub fn from_u64(i: u64) -> Self {
90 ConstValue::Scalar(Scalar::from_u64(i))
91 }
92
93 pub fn from_machine_usize(i: u64, cx: &impl HasDataLayout) -> Self {
94 ConstValue::Scalar(Scalar::from_machine_usize(i, cx))
95 }
96 }
97
/// A `Scalar` represents an immediate, primitive value existing outside of a
/// `memory::Allocation`. It is in many ways like a small chunk of a `Allocation`, up to 8 bytes in
/// size. Like a range of bytes in an `Allocation`, a `Scalar` can either represent the raw bytes
/// of a simple value or a pointer into another `Allocation`
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable)]
pub enum Scalar<Tag = ()> {
    /// The raw bytes of a simple value.
    Int(ScalarInt),

    /// A pointer into an `Allocation`. An `Allocation` in the `memory` module has a list of
    /// relocations, but a `Scalar` is only large enough to contain one, so we just represent the
    /// relocation and its associated offset together as a `Pointer` here.
    Ptr(Pointer<Tag>),
}

// `Scalar` is embedded in many interpreter types; make sure it does not accidentally grow.
#[cfg(target_arch = "x86_64")]
static_assert_size!(Scalar, 24);
116
117 // We want the `Debug` output to be readable as it is used by `derive(Debug)` for
118 // all the Miri types.
119 impl<Tag: fmt::Debug> fmt::Debug for Scalar<Tag> {
120 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
121 match self {
122 Scalar::Ptr(ptr) => write!(f, "{:?}", ptr),
123 Scalar::Int(int) => write!(f, "{:?}", int),
124 }
125 }
126 }
127
128 impl<Tag: fmt::Debug> fmt::Display for Scalar<Tag> {
129 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
130 match self {
131 Scalar::Ptr(ptr) => write!(f, "pointer to {}", ptr),
132 Scalar::Int { .. } => fmt::Debug::fmt(self, f),
133 }
134 }
135 }
136
137 impl<Tag> From<Single> for Scalar<Tag> {
138 #[inline(always)]
139 fn from(f: Single) -> Self {
140 Scalar::from_f32(f)
141 }
142 }
143
144 impl<Tag> From<Double> for Scalar<Tag> {
145 #[inline(always)]
146 fn from(f: Double) -> Self {
147 Scalar::from_f64(f)
148 }
149 }
150
151 impl Scalar<()> {
152 /// Tag this scalar with `new_tag` if it is a pointer, leave it unchanged otherwise.
153 ///
154 /// Used by `MemPlace::replace_tag`.
155 #[inline]
156 pub fn with_tag<Tag>(self, new_tag: Tag) -> Scalar<Tag> {
157 match self {
158 Scalar::Ptr(ptr) => Scalar::Ptr(ptr.with_tag(new_tag)),
159 Scalar::Int(int) => Scalar::Int(int),
160 }
161 }
162 }
163
impl<'tcx, Tag> Scalar<Tag> {
    /// The scalar representing a zero-sized value.
    pub const ZST: Self = Scalar::Int(ScalarInt::ZST);

    /// Erase the tag from the scalar, if any.
    ///
    /// Used by error reporting code to avoid having the error type depend on `Tag`.
    #[inline]
    pub fn erase_tag(self) -> Scalar {
        match self {
            Scalar::Ptr(ptr) => Scalar::Ptr(ptr.erase_tag()),
            Scalar::Int(int) => Scalar::Int(int),
        }
    }

    /// The null pointer: a pointer-sized integer scalar with value 0.
    #[inline]
    pub fn null_ptr(cx: &impl HasDataLayout) -> Self {
        Scalar::Int(ScalarInt::null(cx.data_layout().pointer_size))
    }

    /// Helper for the `ptr_*offset` methods below: applies `f_int` to the integer
    /// representation or `f_ptr` to the pointer representation, preserving the variant.
    #[inline(always)]
    fn ptr_op(
        self,
        dl: &TargetDataLayout,
        f_int: impl FnOnce(u64) -> InterpResult<'tcx, u64>,
        f_ptr: impl FnOnce(Pointer<Tag>) -> InterpResult<'tcx, Pointer<Tag>>,
    ) -> InterpResult<'tcx, Self> {
        match self {
            Scalar::Int(int) => Ok(Scalar::Int(int.ptr_sized_op(dl, f_int)?)),
            Scalar::Ptr(ptr) => Ok(Scalar::Ptr(f_ptr(ptr)?)),
        }
    }

    /// Offset the scalar by `i` bytes, erroring on overflow.
    #[inline]
    pub fn ptr_offset(self, i: Size, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
        let dl = cx.data_layout();
        self.ptr_op(dl, |int| dl.offset(int, i.bytes()), |ptr| ptr.offset(i, dl))
    }

    /// Offset the scalar by `i` bytes, wrapping around on overflow.
    #[inline]
    pub fn ptr_wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
        let dl = cx.data_layout();
        self.ptr_op(
            dl,
            |int| Ok(dl.overflowing_offset(int, i.bytes()).0),
            |ptr| Ok(ptr.wrapping_offset(i, dl)),
        )
        // Both closures always return `Ok`, so this `unwrap` cannot panic.
        .unwrap()
    }

    /// Offset the scalar by the *signed* amount `i`, erroring on overflow.
    #[inline]
    pub fn ptr_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
        let dl = cx.data_layout();
        self.ptr_op(dl, |int| dl.signed_offset(int, i), |ptr| ptr.signed_offset(i, dl))
    }

    /// Offset the scalar by the *signed* amount `i`, wrapping around on overflow.
    #[inline]
    pub fn ptr_wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
        let dl = cx.data_layout();
        self.ptr_op(
            dl,
            |int| Ok(dl.overflowing_signed_offset(int, i).0),
            |ptr| Ok(ptr.wrapping_signed_offset(i, dl)),
        )
        // Both closures always return `Ok`, so this `unwrap` cannot panic.
        .unwrap()
    }

    /// Creates a boolean scalar (a 1-byte integer holding 0 or 1).
    #[inline]
    pub fn from_bool(b: bool) -> Self {
        Scalar::Int(b.into())
    }

    /// Creates a `char` scalar (stored as its 4-byte code point value).
    #[inline]
    pub fn from_char(c: char) -> Self {
        Scalar::Int(c.into())
    }

    /// Creates an unsigned integer scalar of the given `size`; `None` if `i` does not fit.
    #[inline]
    pub fn try_from_uint(i: impl Into<u128>, size: Size) -> Option<Self> {
        ScalarInt::try_from_uint(i, size).map(Scalar::Int)
    }

    /// Like `try_from_uint`, but ICEs when the value does not fit in `size` bits.
    #[inline]
    pub fn from_uint(i: impl Into<u128>, size: Size) -> Self {
        let i = i.into();
        Self::try_from_uint(i, size)
            .unwrap_or_else(|| bug!("Unsigned value {:#x} does not fit in {} bits", i, size.bits()))
    }

    #[inline]
    pub fn from_u8(i: u8) -> Self {
        Scalar::Int(i.into())
    }

    #[inline]
    pub fn from_u16(i: u16) -> Self {
        Scalar::Int(i.into())
    }

    #[inline]
    pub fn from_u32(i: u32) -> Self {
        Scalar::Int(i.into())
    }

    #[inline]
    pub fn from_u64(i: u64) -> Self {
        Scalar::Int(i.into())
    }

    /// Creates a pointer-sized unsigned integer scalar for the given target.
    #[inline]
    pub fn from_machine_usize(i: u64, cx: &impl HasDataLayout) -> Self {
        Self::from_uint(i, cx.data_layout().pointer_size)
    }

    /// Creates a signed integer scalar of the given `size`; `None` if `i` does not fit.
    #[inline]
    pub fn try_from_int(i: impl Into<i128>, size: Size) -> Option<Self> {
        ScalarInt::try_from_int(i, size).map(Scalar::Int)
    }

    /// Like `try_from_int`, but ICEs when the value does not fit in `size` bits.
    #[inline]
    pub fn from_int(i: impl Into<i128>, size: Size) -> Self {
        let i = i.into();
        Self::try_from_int(i, size)
            .unwrap_or_else(|| bug!("Signed value {:#x} does not fit in {} bits", i, size.bits()))
    }

    #[inline]
    pub fn from_i8(i: i8) -> Self {
        Self::from_int(i, Size::from_bits(8))
    }

    #[inline]
    pub fn from_i16(i: i16) -> Self {
        Self::from_int(i, Size::from_bits(16))
    }

    #[inline]
    pub fn from_i32(i: i32) -> Self {
        Self::from_int(i, Size::from_bits(32))
    }

    #[inline]
    pub fn from_i64(i: i64) -> Self {
        Self::from_int(i, Size::from_bits(64))
    }

    /// Creates a pointer-sized signed integer scalar for the given target.
    #[inline]
    pub fn from_machine_isize(i: i64, cx: &impl HasDataLayout) -> Self {
        Self::from_int(i, cx.data_layout().pointer_size)
    }

    /// Creates an `f32` scalar, storing the float's raw bits.
    #[inline]
    pub fn from_f32(f: Single) -> Self {
        Scalar::Int(f.into())
    }

    /// Creates an `f64` scalar, storing the float's raw bits.
    #[inline]
    pub fn from_f64(f: Double) -> Self {
        Scalar::Int(f.into())
    }

    /// This is very rarely the method you want! You should dispatch on the type
    /// and use `force_bits`/`assert_bits`/`force_ptr`/`assert_ptr`.
    /// This method only exists for the benefit of low-level memory operations
    /// as well as the implementation of the `force_*` methods.
    #[inline]
    pub fn to_bits_or_ptr(
        self,
        target_size: Size,
        cx: &impl HasDataLayout,
    ) -> Result<u128, Pointer<Tag>> {
        assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
        match self {
            Scalar::Int(int) => Ok(int.assert_bits(target_size)),
            Scalar::Ptr(ptr) => {
                // Pointers can only be stored at pointer size; anything else is a caller bug.
                assert_eq!(target_size, cx.data_layout().pointer_size);
                Err(ptr)
            }
        }
    }

    /// This method is intentionally private!
    /// It is just a helper for other methods in this file.
    ///
    /// Returns the raw bits if this is an integer of exactly `target_size`;
    /// size mismatches are UB errors and pointers are unsupported here.
    #[inline]
    fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> {
        assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
        match self {
            Scalar::Int(int) => int.to_bits(target_size).map_err(|size| {
                err_ub!(ScalarSizeMismatch {
                    target_size: target_size.bytes(),
                    data_size: size.bytes(),
                })
                .into()
            }),
            Scalar::Ptr(_) => throw_unsup!(ReadPointerAsBytes),
        }
    }

    /// Like `to_bits`, but panics instead of returning an error.
    #[inline(always)]
    pub fn assert_bits(self, target_size: Size) -> u128 {
        self.to_bits(target_size).expect("expected Raw bits but got a Pointer")
    }

    /// Returns the `ScalarInt`, ICEing if this is a pointer.
    #[inline]
    pub fn assert_int(self) -> ScalarInt {
        match self {
            Scalar::Ptr(_) => bug!("expected an int but got an abstract pointer"),
            Scalar::Int(int) => int,
        }
    }

    /// Returns the `Pointer`, ICEing if this is an integer.
    #[inline]
    pub fn assert_ptr(self) -> Pointer<Tag> {
        match self {
            Scalar::Ptr(p) => p,
            Scalar::Int { .. } => bug!("expected a Pointer but got Raw bits"),
        }
    }

    /// Do not call this method! Dispatch based on the type instead.
    #[inline]
    pub fn is_bits(self) -> bool {
        matches!(self, Scalar::Int { .. })
    }

    /// Do not call this method! Dispatch based on the type instead.
    #[inline]
    pub fn is_ptr(self) -> bool {
        matches!(self, Scalar::Ptr(_))
    }

    /// Converts the scalar to a `bool`; a UB error if it is not a 1-byte value of 0 or 1.
    pub fn to_bool(self) -> InterpResult<'tcx, bool> {
        let val = self.to_u8()?;
        match val {
            0 => Ok(false),
            1 => Ok(true),
            _ => throw_ub!(InvalidBool(val)),
        }
    }

    /// Converts the scalar to a `char`; a UB error if it is not a valid code point.
    pub fn to_char(self) -> InterpResult<'tcx, char> {
        let val = self.to_u32()?;
        match std::char::from_u32(val) {
            Some(c) => Ok(c),
            None => throw_ub!(InvalidChar(val)),
        }
    }

    /// Helper shared by the unsigned `to_u*` conversions below.
    #[inline]
    fn to_unsigned_with_bit_width(self, bits: u64) -> InterpResult<'static, u128> {
        let sz = Size::from_bits(bits);
        self.to_bits(sz)
    }

    /// Converts the scalar to produce an `u8`. Fails if the scalar is a pointer.
    pub fn to_u8(self) -> InterpResult<'static, u8> {
        self.to_unsigned_with_bit_width(8).map(|v| u8::try_from(v).unwrap())
    }

    /// Converts the scalar to produce an `u16`. Fails if the scalar is a pointer.
    pub fn to_u16(self) -> InterpResult<'static, u16> {
        self.to_unsigned_with_bit_width(16).map(|v| u16::try_from(v).unwrap())
    }

    /// Converts the scalar to produce an `u32`. Fails if the scalar is a pointer.
    pub fn to_u32(self) -> InterpResult<'static, u32> {
        self.to_unsigned_with_bit_width(32).map(|v| u32::try_from(v).unwrap())
    }

    /// Converts the scalar to produce an `u64`. Fails if the scalar is a pointer.
    pub fn to_u64(self) -> InterpResult<'static, u64> {
        self.to_unsigned_with_bit_width(64).map(|v| u64::try_from(v).unwrap())
    }

    /// Converts the scalar to produce an `u128`. Fails if the scalar is a pointer.
    pub fn to_u128(self) -> InterpResult<'static, u128> {
        self.to_unsigned_with_bit_width(128)
    }

    /// Converts the scalar to a target `usize`. Fails if the scalar is a pointer.
    pub fn to_machine_usize(self, cx: &impl HasDataLayout) -> InterpResult<'static, u64> {
        let b = self.to_bits(cx.data_layout().pointer_size)?;
        Ok(u64::try_from(b).unwrap())
    }

    /// Helper shared by the signed `to_i*` conversions below: reads the bits
    /// and sign-extends them to `i128`.
    #[inline]
    fn to_signed_with_bit_width(self, bits: u64) -> InterpResult<'static, i128> {
        let sz = Size::from_bits(bits);
        let b = self.to_bits(sz)?;
        Ok(sz.sign_extend(b) as i128)
    }

    /// Converts the scalar to produce an `i8`. Fails if the scalar is a pointer.
    pub fn to_i8(self) -> InterpResult<'static, i8> {
        self.to_signed_with_bit_width(8).map(|v| i8::try_from(v).unwrap())
    }

    /// Converts the scalar to produce an `i16`. Fails if the scalar is a pointer.
    pub fn to_i16(self) -> InterpResult<'static, i16> {
        self.to_signed_with_bit_width(16).map(|v| i16::try_from(v).unwrap())
    }

    /// Converts the scalar to produce an `i32`. Fails if the scalar is a pointer.
    pub fn to_i32(self) -> InterpResult<'static, i32> {
        self.to_signed_with_bit_width(32).map(|v| i32::try_from(v).unwrap())
    }

    /// Converts the scalar to produce an `i64`. Fails if the scalar is a pointer.
    pub fn to_i64(self) -> InterpResult<'static, i64> {
        self.to_signed_with_bit_width(64).map(|v| i64::try_from(v).unwrap())
    }

    /// Converts the scalar to produce an `i128`. Fails if the scalar is a pointer.
    pub fn to_i128(self) -> InterpResult<'static, i128> {
        self.to_signed_with_bit_width(128)
    }

    /// Converts the scalar to a target `isize`. Fails if the scalar is a pointer.
    pub fn to_machine_isize(self, cx: &impl HasDataLayout) -> InterpResult<'static, i64> {
        let sz = cx.data_layout().pointer_size;
        let b = self.to_bits(sz)?;
        let b = sz.sign_extend(b) as i128;
        Ok(i64::try_from(b).unwrap())
    }

    #[inline]
    pub fn to_f32(self) -> InterpResult<'static, Single> {
        // Going through `u32` to check size and truncation.
        Ok(Single::from_bits(self.to_u32()?.into()))
    }

    #[inline]
    pub fn to_f64(self) -> InterpResult<'static, Double> {
        // Going through `u64` to check size and truncation.
        Ok(Double::from_bits(self.to_u64()?.into()))
    }
}
498
499 impl<Tag> From<Pointer<Tag>> for Scalar<Tag> {
500 #[inline(always)]
501 fn from(ptr: Pointer<Tag>) -> Self {
502 Scalar::Ptr(ptr)
503 }
504 }
505
// A `Scalar` that may alternatively represent uninitialized memory.
#[derive(Clone, Copy, Eq, PartialEq, TyEncodable, TyDecodable, HashStable, Hash)]
pub enum ScalarMaybeUninit<Tag = ()> {
    /// An actual, initialized scalar value.
    Scalar(Scalar<Tag>),
    /// Uninitialized memory; reading it as a value is UB (see `check_init`).
    Uninit,
}

// Make sure the `Uninit` variant does not accidentally grow this beyond `Scalar`'s size.
#[cfg(target_arch = "x86_64")]
static_assert_size!(ScalarMaybeUninit, 24);
514
515 impl<Tag> From<Scalar<Tag>> for ScalarMaybeUninit<Tag> {
516 #[inline(always)]
517 fn from(s: Scalar<Tag>) -> Self {
518 ScalarMaybeUninit::Scalar(s)
519 }
520 }
521
522 impl<Tag> From<Pointer<Tag>> for ScalarMaybeUninit<Tag> {
523 #[inline(always)]
524 fn from(s: Pointer<Tag>) -> Self {
525 ScalarMaybeUninit::Scalar(s.into())
526 }
527 }
528
529 // We want the `Debug` output to be readable as it is used by `derive(Debug)` for
530 // all the Miri types.
531 impl<Tag: fmt::Debug> fmt::Debug for ScalarMaybeUninit<Tag> {
532 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
533 match self {
534 ScalarMaybeUninit::Uninit => write!(f, "<uninitialized>"),
535 ScalarMaybeUninit::Scalar(s) => write!(f, "{:?}", s),
536 }
537 }
538 }
539
540 impl<Tag: fmt::Debug> fmt::Display for ScalarMaybeUninit<Tag> {
541 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
542 match self {
543 ScalarMaybeUninit::Uninit => write!(f, "uninitialized bytes"),
544 ScalarMaybeUninit::Scalar(s) => write!(f, "{}", s),
545 }
546 }
547 }
548
impl<'tcx, Tag> ScalarMaybeUninit<Tag> {
    /// Erase the tag from the scalar, if any.
    ///
    /// Used by error reporting code to avoid having the error type depend on `Tag`.
    #[inline]
    pub fn erase_tag(self) -> ScalarMaybeUninit {
        match self {
            ScalarMaybeUninit::Scalar(s) => ScalarMaybeUninit::Scalar(s.erase_tag()),
            ScalarMaybeUninit::Uninit => ScalarMaybeUninit::Uninit,
        }
    }

    /// Returns the contained `Scalar`, or a UB error if the value is uninitialized.
    #[inline]
    pub fn check_init(self) -> InterpResult<'static, Scalar<Tag>> {
        match self {
            ScalarMaybeUninit::Scalar(scalar) => Ok(scalar),
            ScalarMaybeUninit::Uninit => throw_ub!(InvalidUninitBytes(None)),
        }
    }

    // All the conversions below first check for initialization and then forward
    // to the method of the same name on `Scalar`.

    #[inline(always)]
    pub fn to_bool(self) -> InterpResult<'tcx, bool> {
        self.check_init()?.to_bool()
    }

    #[inline(always)]
    pub fn to_char(self) -> InterpResult<'tcx, char> {
        self.check_init()?.to_char()
    }

    #[inline(always)]
    pub fn to_f32(self) -> InterpResult<'tcx, Single> {
        self.check_init()?.to_f32()
    }

    #[inline(always)]
    pub fn to_f64(self) -> InterpResult<'tcx, Double> {
        self.check_init()?.to_f64()
    }

    #[inline(always)]
    pub fn to_u8(self) -> InterpResult<'tcx, u8> {
        self.check_init()?.to_u8()
    }

    #[inline(always)]
    pub fn to_u16(self) -> InterpResult<'tcx, u16> {
        self.check_init()?.to_u16()
    }

    #[inline(always)]
    pub fn to_u32(self) -> InterpResult<'tcx, u32> {
        self.check_init()?.to_u32()
    }

    #[inline(always)]
    pub fn to_u64(self) -> InterpResult<'tcx, u64> {
        self.check_init()?.to_u64()
    }

    #[inline(always)]
    pub fn to_machine_usize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> {
        self.check_init()?.to_machine_usize(cx)
    }

    #[inline(always)]
    pub fn to_i8(self) -> InterpResult<'tcx, i8> {
        self.check_init()?.to_i8()
    }

    #[inline(always)]
    pub fn to_i16(self) -> InterpResult<'tcx, i16> {
        self.check_init()?.to_i16()
    }

    #[inline(always)]
    pub fn to_i32(self) -> InterpResult<'tcx, i32> {
        self.check_init()?.to_i32()
    }

    #[inline(always)]
    pub fn to_i64(self) -> InterpResult<'tcx, i64> {
        self.check_init()?.to_i64()
    }

    #[inline(always)]
    pub fn to_machine_isize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, i64> {
        self.check_init()?.to_machine_isize(cx)
    }
}
639
640 /// Gets the bytes of a constant slice value.
641 pub fn get_slice_bytes<'tcx>(cx: &impl HasDataLayout, val: ConstValue<'tcx>) -> &'tcx [u8] {
642 if let ConstValue::Slice { data, start, end } = val {
643 let len = end - start;
644 data.get_bytes(
645 cx,
646 // invent a pointer, only the offset is relevant anyway
647 Pointer::new(AllocId(0), Size::from_bytes(start)),
648 Size::from_bytes(len),
649 )
650 .unwrap_or_else(|err| bug!("const slice is invalid: {:?}", err))
651 } else {
652 bug!("expected const slice, but found another const value");
653 }
654 }