1 //! An interpreter for MIR used in CTFE and by miri.
// Macros building an `InterpError` from the short name of the corresponding
// `*Info` variant plus its arguments. Reconstructed into well-formed shape
// from a garbled extraction (line-number artifacts, missing braces).

#[macro_export]
macro_rules! err_unsup {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::Unsupported(
            $crate::mir::interpret::UnsupportedOpInfo::$($tt)*
        )
    };
}

#[macro_export]
macro_rules! err_unsup_format {
    ($($tt:tt)*) => { err_unsup!(Unsupported(format!($($tt)*))) };
}

#[macro_export]
macro_rules! err_inval {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::InvalidProgram(
            $crate::mir::interpret::InvalidProgramInfo::$($tt)*
        )
    };
}

#[macro_export]
macro_rules! err_ub {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::UndefinedBehavior(
            $crate::mir::interpret::UndefinedBehaviorInfo::$($tt)*
        )
    };
}

#[macro_export]
macro_rules! err_ub_format {
    ($($tt:tt)*) => { err_ub!(Ub(format!($($tt)*))) };
}

#[macro_export]
macro_rules! err_exhaust {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::ResourceExhaustion(
            $crate::mir::interpret::ResourceExhaustionInfo::$($tt)*
        )
    };
}

#[macro_export]
macro_rules! err_machine_stop {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::MachineStop(Box::new($($tt)*))
    };
}
// In the `throw_*` macros, avoid `return` to make them work with `try {}`.
// Each macro wraps the matching `err_*!` value in `Err` and applies `?`,
// so the surrounding function/`try` block short-circuits with that error.

#[macro_export]
macro_rules! throw_unsup {
    ($($tt:tt)*) => { Err::<!, _>(err_unsup!($($tt)*))? };
}

#[macro_export]
macro_rules! throw_unsup_format {
    ($($tt:tt)*) => { throw_unsup!(Unsupported(format!($($tt)*))) };
}

#[macro_export]
macro_rules! throw_inval {
    ($($tt:tt)*) => { Err::<!, _>(err_inval!($($tt)*))? };
}

#[macro_export]
macro_rules! throw_ub {
    ($($tt:tt)*) => { Err::<!, _>(err_ub!($($tt)*))? };
}

#[macro_export]
macro_rules! throw_ub_format {
    ($($tt:tt)*) => { throw_ub!(Ub(format!($($tt)*))) };
}

#[macro_export]
macro_rules! throw_exhaust {
    ($($tt:tt)*) => { Err::<!, _>(err_exhaust!($($tt)*))? };
}

#[macro_export]
macro_rules! throw_machine_stop {
    ($($tt:tt)*) => { Err::<!, _>(err_machine_stop!($($tt)*))? };
}
98 use std
::convert
::TryFrom
;
101 use std
::io
::{Read, Write}
;
102 use std
::num
::NonZeroU32
;
103 use std
::sync
::atomic
::{AtomicU32, Ordering}
;
105 use rustc_ast
::LitKind
;
106 use rustc_data_structures
::fx
::FxHashMap
;
107 use rustc_data_structures
::sync
::{HashMapExt, Lock}
;
108 use rustc_data_structures
::tiny_list
::TinyList
;
109 use rustc_hir
::def_id
::DefId
;
110 use rustc_macros
::HashStable
;
111 use rustc_middle
::ty
::print
::with_no_trimmed_paths
;
112 use rustc_serialize
::{Decodable, Encodable}
;
113 use rustc_target
::abi
::Endian
;
116 use crate::ty
::codec
::{TyDecoder, TyEncoder}
;
117 use crate::ty
::subst
::GenericArgKind
;
118 use crate::ty
::{self, Instance, Ty, TyCtxt}
;
120 pub use self::error
::{
121 struct_error
, CheckInAllocMsg
, ErrorHandled
, EvalToAllocationRawResult
, EvalToConstValueResult
,
122 InterpError
, InterpErrorInfo
, InterpResult
, InvalidProgramInfo
, MachineStopType
,
123 ResourceExhaustionInfo
, UndefinedBehaviorInfo
, UninitBytesAccess
, UnsupportedOpInfo
,
126 pub use self::value
::{get_slice_bytes, ConstAlloc, ConstValue, Scalar, ScalarMaybeUninit}
;
128 pub use self::allocation
::{Allocation, AllocationExtra, InitMask, Relocations}
;
130 pub use self::pointer
::{Pointer, PointerArithmetic}
;
132 /// Uniquely identifies one of the following:
135 #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, TyEncodable, TyDecodable)]
136 #[derive(HashStable, Lift)]
137 pub struct GlobalId
<'tcx
> {
138 /// For a constant or static, the `Instance` of the item itself.
139 /// For a promoted global, the `Instance` of the function they belong to.
140 pub instance
: ty
::Instance
<'tcx
>,
142 /// The index for promoted globals within their function's `mir::Body`.
143 pub promoted
: Option
<mir
::Promoted
>,
146 impl GlobalId
<'tcx
> {
147 pub fn display(self, tcx
: TyCtxt
<'tcx
>) -> String
{
148 let instance_name
= with_no_trimmed_paths(|| tcx
.def_path_str(self.instance
.def
.def_id()));
149 if let Some(promoted
) = self.promoted
{
150 format
!("{}::{:?}", instance_name
, promoted
)
157 /// Input argument for `tcx.lit_to_const`.
158 #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, HashStable)]
159 pub struct LitToConstInput
<'tcx
> {
160 /// The absolute value of the resultant constant.
161 pub lit
: &'tcx LitKind
,
162 /// The type of the constant.
164 /// If the constant is negative.
168 /// Error type for `tcx.lit_to_const`.
169 #[derive(Copy, Clone, Debug, Eq, PartialEq, HashStable)]
170 pub enum LitToConstError
{
171 /// The literal's inferred type did not match the expected `ty` in the input.
172 /// This is used for graceful error handling (`delay_span_bug`) in
173 /// type checking (`Const::from_anon_const`).
/// Opaque identifier for an interpreter-known memory allocation.
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct AllocId(pub u64);

// We want the `Debug` output to be readable as it is used by `derive(Debug)` for
// all the Miri types.
impl fmt::Debug for AllocId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `{:#?}` (alternate) prints the short form `a<N>`, `{:?}` prints `alloc<N>`.
        if f.alternate() { write!(f, "a{}", self.0) } else { write!(f, "alloc{}", self.0) }
    }
}

impl fmt::Display for AllocId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Display intentionally matches Debug.
        fmt::Debug::fmt(self, f)
    }
}
196 #[derive(TyDecodable, TyEncodable)]
197 enum AllocDiscriminant
{
203 pub fn specialized_encode_alloc_id
<'tcx
, E
: TyEncoder
<'tcx
>>(
207 ) -> Result
<(), E
::Error
> {
208 match tcx
.global_alloc(alloc_id
) {
209 GlobalAlloc
::Memory(alloc
) => {
210 trace
!("encoding {:?} with {:#?}", alloc_id
, alloc
);
211 AllocDiscriminant
::Alloc
.encode(encoder
)?
;
212 alloc
.encode(encoder
)?
;
214 GlobalAlloc
::Function(fn_instance
) => {
215 trace
!("encoding {:?} with {:#?}", alloc_id
, fn_instance
);
216 AllocDiscriminant
::Fn
.encode(encoder
)?
;
217 fn_instance
.encode(encoder
)?
;
219 GlobalAlloc
::Static(did
) => {
220 assert
!(!tcx
.is_thread_local_static(did
));
221 // References to statics doesn't need to know about their allocations,
222 // just about its `DefId`.
223 AllocDiscriminant
::Static
.encode(encoder
)?
;
224 did
.encode(encoder
)?
;
230 // Used to avoid infinite recursion when decoding cyclic allocations.
231 type DecodingSessionId
= NonZeroU32
;
236 InProgressNonAlloc(TinyList
<DecodingSessionId
>),
237 InProgress(TinyList
<DecodingSessionId
>, AllocId
),
241 pub struct AllocDecodingState
{
242 // For each `AllocId`, we keep track of which decoding state it's currently in.
243 decoding_state
: Vec
<Lock
<State
>>,
244 // The offsets of each allocation in the data stream.
245 data_offsets
: Vec
<u32>,
248 impl AllocDecodingState
{
249 pub fn new_decoding_session(&self) -> AllocDecodingSession
<'_
> {
250 static DECODER_SESSION_ID
: AtomicU32
= AtomicU32
::new(0);
251 let counter
= DECODER_SESSION_ID
.fetch_add(1, Ordering
::SeqCst
);
253 // Make sure this is never zero.
254 let session_id
= DecodingSessionId
::new((counter
& 0x7FFFFFFF) + 1).unwrap();
256 AllocDecodingSession { state: self, session_id }
259 pub fn new(data_offsets
: Vec
<u32>) -> Self {
260 let decoding_state
= vec
![Lock
::new(State
::Empty
); data_offsets
.len()];
262 Self { decoding_state, data_offsets }
266 #[derive(Copy, Clone)]
267 pub struct AllocDecodingSession
<'s
> {
268 state
: &'s AllocDecodingState
,
269 session_id
: DecodingSessionId
,
272 impl<'s
> AllocDecodingSession
<'s
> {
273 /// Decodes an `AllocId` in a thread-safe way.
274 pub fn decode_alloc_id
<D
>(&self, decoder
: &mut D
) -> Result
<AllocId
, D
::Error
>
278 // Read the index of the allocation.
279 let idx
= usize::try_from(decoder
.read_u32()?
).unwrap();
280 let pos
= usize::try_from(self.state
.data_offsets
[idx
]).unwrap();
282 // Decode the `AllocDiscriminant` now so that we know if we have to reserve an
284 let (alloc_kind
, pos
) = decoder
.with_position(pos
, |decoder
| {
285 let alloc_kind
= AllocDiscriminant
::decode(decoder
)?
;
286 Ok((alloc_kind
, decoder
.position()))
289 // Check the decoding state to see if it's already decoded or if we should
292 let mut entry
= self.state
.decoding_state
[idx
].lock();
295 State
::Done(alloc_id
) => {
298 ref mut entry @ State
::Empty
=> {
299 // We are allowed to decode.
301 AllocDiscriminant
::Alloc
=> {
302 // If this is an allocation, we need to reserve an
303 // `AllocId` so we can decode cyclic graphs.
304 let alloc_id
= decoder
.tcx().reserve_alloc_id();
306 State
::InProgress(TinyList
::new_single(self.session_id
), alloc_id
);
309 AllocDiscriminant
::Fn
| AllocDiscriminant
::Static
=> {
310 // Fns and statics cannot be cyclic, and their `AllocId`
311 // is determined later by interning.
313 State
::InProgressNonAlloc(TinyList
::new_single(self.session_id
));
318 State
::InProgressNonAlloc(ref mut sessions
) => {
319 if sessions
.contains(&self.session_id
) {
320 bug
!("this should be unreachable");
322 // Start decoding concurrently.
323 sessions
.insert(self.session_id
);
327 State
::InProgress(ref mut sessions
, alloc_id
) => {
328 if sessions
.contains(&self.session_id
) {
332 // Start decoding concurrently.
333 sessions
.insert(self.session_id
);
340 // Now decode the actual data.
341 let alloc_id
= decoder
.with_position(pos
, |decoder
| {
343 AllocDiscriminant
::Alloc
=> {
344 let alloc
= <&'tcx Allocation
as Decodable
<_
>>::decode(decoder
)?
;
345 // We already have a reserved `AllocId`.
346 let alloc_id
= alloc_id
.unwrap();
347 trace
!("decoded alloc {:?}: {:#?}", alloc_id
, alloc
);
348 decoder
.tcx().set_alloc_id_same_memory(alloc_id
, alloc
);
351 AllocDiscriminant
::Fn
=> {
352 assert
!(alloc_id
.is_none());
353 trace
!("creating fn alloc ID");
354 let instance
= ty
::Instance
::decode(decoder
)?
;
355 trace
!("decoded fn alloc instance: {:?}", instance
);
356 let alloc_id
= decoder
.tcx().create_fn_alloc(instance
);
359 AllocDiscriminant
::Static
=> {
360 assert
!(alloc_id
.is_none());
361 trace
!("creating extern static alloc ID");
362 let did
= <DefId
as Decodable
<D
>>::decode(decoder
)?
;
363 trace
!("decoded static def-ID: {:?}", did
);
364 let alloc_id
= decoder
.tcx().create_static_alloc(did
);
370 self.state
.decoding_state
[idx
].with_lock(|entry
| {
371 *entry
= State
::Done(alloc_id
);
378 /// An allocation in the global (tcx-managed) memory can be either a function pointer,
379 /// a static, or a "real" allocation with some data in it.
380 #[derive(Debug, Clone, Eq, PartialEq, Hash, TyDecodable, TyEncodable, HashStable)]
381 pub enum GlobalAlloc
<'tcx
> {
382 /// The alloc ID is used as a function pointer.
383 Function(Instance
<'tcx
>),
384 /// The alloc ID points to a "lazy" static variable that did not get computed (yet).
385 /// This is also used to break the cycle in recursive statics.
387 /// The alloc ID points to memory.
388 Memory(&'tcx Allocation
),
391 impl GlobalAlloc
<'tcx
> {
392 /// Panics if the `GlobalAlloc` does not refer to an `GlobalAlloc::Memory`
395 pub fn unwrap_memory(&self) -> &'tcx Allocation
{
397 GlobalAlloc
::Memory(mem
) => mem
,
398 _
=> bug
!("expected memory, got {:?}", self),
402 /// Panics if the `GlobalAlloc` is not `GlobalAlloc::Function`
405 pub fn unwrap_fn(&self) -> Instance
<'tcx
> {
407 GlobalAlloc
::Function(instance
) => instance
,
408 _
=> bug
!("expected function, got {:?}", self),
413 crate struct AllocMap
<'tcx
> {
414 /// Maps `AllocId`s to their corresponding allocations.
415 alloc_map
: FxHashMap
<AllocId
, GlobalAlloc
<'tcx
>>,
417 /// Used to ensure that statics and functions only get one associated `AllocId`.
418 /// Should never contain a `GlobalAlloc::Memory`!
420 // FIXME: Should we just have two separate dedup maps for statics and functions each?
421 dedup
: FxHashMap
<GlobalAlloc
<'tcx
>, AllocId
>,
423 /// The `AllocId` to assign to the next requested ID.
424 /// Always incremented; never gets smaller.
428 impl<'tcx
> AllocMap
<'tcx
> {
429 crate fn new() -> Self {
430 AllocMap { alloc_map: Default::default(), dedup: Default::default(), next_id: AllocId(0) }
432 fn reserve(&mut self) -> AllocId
{
433 let next
= self.next_id
;
434 self.next_id
.0 = self.next_id
.0.checked_add(1).expect(
435 "You overflowed a u64 by incrementing by 1... \
436 You've just earned yourself a free drink if we ever meet. \
437 Seriously, how did you do that?!",
443 impl<'tcx
> TyCtxt
<'tcx
> {
444 /// Obtains a new allocation ID that can be referenced but does not
445 /// yet have an allocation backing it.
447 /// Make sure to call `set_alloc_id_memory` or `set_alloc_id_same_memory` before returning such
448 /// an `AllocId` from a query.
449 pub fn reserve_alloc_id(self) -> AllocId
{
450 self.alloc_map
.lock().reserve()
453 /// Reserves a new ID *if* this allocation has not been dedup-reserved before.
454 /// Should only be used for function pointers and statics, we don't want
455 /// to dedup IDs for "real" memory!
456 fn reserve_and_set_dedup(self, alloc
: GlobalAlloc
<'tcx
>) -> AllocId
{
457 let mut alloc_map
= self.alloc_map
.lock();
459 GlobalAlloc
::Function(..) | GlobalAlloc
::Static(..) => {}
460 GlobalAlloc
::Memory(..) => bug
!("Trying to dedup-reserve memory with real data!"),
462 if let Some(&alloc_id
) = alloc_map
.dedup
.get(&alloc
) {
465 let id
= alloc_map
.reserve();
466 debug
!("creating alloc {:?} with id {}", alloc
, id
);
467 alloc_map
.alloc_map
.insert(id
, alloc
.clone());
468 alloc_map
.dedup
.insert(alloc
, id
);
472 /// Generates an `AllocId` for a static or return a cached one in case this function has been
473 /// called on the same static before.
474 pub fn create_static_alloc(self, static_id
: DefId
) -> AllocId
{
475 self.reserve_and_set_dedup(GlobalAlloc
::Static(static_id
))
478 /// Generates an `AllocId` for a function. Depending on the function type,
479 /// this might get deduplicated or assigned a new ID each time.
480 pub fn create_fn_alloc(self, instance
: Instance
<'tcx
>) -> AllocId
{
481 // Functions cannot be identified by pointers, as asm-equal functions can get deduplicated
482 // by the linker (we set the "unnamed_addr" attribute for LLVM) and functions can be
483 // duplicated across crates.
484 // We thus generate a new `AllocId` for every mention of a function. This means that
485 // `main as fn() == main as fn()` is false, while `let x = main as fn(); x == x` is true.
486 // However, formatting code relies on function identity (see #58320), so we only do
487 // this for generic functions. Lifetime parameters are ignored.
488 let is_generic
= instance
491 .any(|kind
| !matches
!(kind
.unpack(), GenericArgKind
::Lifetime(_
)));
494 let mut alloc_map
= self.alloc_map
.lock();
495 let id
= alloc_map
.reserve();
496 alloc_map
.alloc_map
.insert(id
, GlobalAlloc
::Function(instance
));
500 self.reserve_and_set_dedup(GlobalAlloc
::Function(instance
))
504 /// Interns the `Allocation` and return a new `AllocId`, even if there's already an identical
505 /// `Allocation` with a different `AllocId`.
506 /// Statics with identical content will still point to the same `Allocation`, i.e.,
507 /// their data will be deduplicated through `Allocation` interning -- but they
508 /// are different places in memory and as such need different IDs.
509 pub fn create_memory_alloc(self, mem
: &'tcx Allocation
) -> AllocId
{
510 let id
= self.reserve_alloc_id();
511 self.set_alloc_id_memory(id
, mem
);
515 /// Returns `None` in case the `AllocId` is dangling. An `InterpretCx` can still have a
516 /// local `Allocation` for that `AllocId`, but having such an `AllocId` in a constant is
517 /// illegal and will likely ICE.
518 /// This function exists to allow const eval to detect the difference between evaluation-
519 /// local dangling pointers and allocations in constants/statics.
521 pub fn get_global_alloc(self, id
: AllocId
) -> Option
<GlobalAlloc
<'tcx
>> {
522 self.alloc_map
.lock().alloc_map
.get(&id
).cloned()
527 /// Panics in case the `AllocId` is dangling. Since that is impossible for `AllocId`s in
528 /// constants (as all constants must pass interning and validation that check for dangling
529 /// ids), this function is frequently used throughout rustc, but should not be used within
531 pub fn global_alloc(self, id
: AllocId
) -> GlobalAlloc
<'tcx
> {
532 match self.get_global_alloc(id
) {
533 Some(alloc
) => alloc
,
534 None
=> bug
!("could not find allocation for {}", id
),
538 /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to
539 /// call this function twice, even with the same `Allocation` will ICE the compiler.
540 pub fn set_alloc_id_memory(self, id
: AllocId
, mem
: &'tcx Allocation
) {
541 if let Some(old
) = self.alloc_map
.lock().alloc_map
.insert(id
, GlobalAlloc
::Memory(mem
)) {
542 bug
!("tried to set allocation ID {}, but it was already existing as {:#?}", id
, old
);
546 /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. May be called
547 /// twice for the same `(AllocId, Allocation)` pair.
548 fn set_alloc_id_same_memory(self, id
: AllocId
, mem
: &'tcx Allocation
) {
549 self.alloc_map
.lock().alloc_map
.insert_same(id
, GlobalAlloc
::Memory(mem
));
553 ////////////////////////////////////////////////////////////////////////////////
554 // Methods to access integers in the target endianness
555 ////////////////////////////////////////////////////////////////////////////////
558 pub fn write_target_uint(
560 mut target
: &mut [u8],
562 ) -> Result
<(), io
::Error
> {
563 // This u128 holds an "any-size uint" (since smaller uints can fits in it)
564 // So we do not write all bytes of the u128, just the "payload".
566 Endian
::Little
=> target
.write(&data
.to_le_bytes())?
,
567 Endian
::Big
=> target
.write(&data
.to_be_bytes()[16 - target
.len()..])?
,
569 debug_assert
!(target
.len() == 0); // We should have filled the target buffer.
574 pub fn read_target_uint(endianness
: Endian
, mut source
: &[u8]) -> Result
<u128
, io
::Error
> {
575 // This u128 holds an "any-size uint" (since smaller uints can fits in it)
576 let mut buf
= [0u8; std
::mem
::size_of
::<u128
>()];
577 // So we do not read exactly 16 bytes into the u128, just the "payload".
578 let uint
= match endianness
{
580 source
.read(&mut buf
)?
;
581 Ok(u128
::from_le_bytes(buf
))
584 source
.read(&mut buf
[16 - source
.len()..])?
;
585 Ok(u128
::from_be_bytes(buf
))
588 debug_assert
!(source
.len() == 0); // We should have consumed the source buffer.
/// Computes the unsigned absolute value without wrapping or panicking.
#[inline]
pub fn uabs(value: i64) -> u64 {
    // The only tricky part here is if value == i64::MIN. In that case,
    // wrapping_abs() returns i64::MIN == -2^63. Casting this value to a u64
    // gives 2^63, the correct value.
    value.wrapping_abs() as u64
}