1 //! An interpreter for MIR used in CTFE and by miri.
/// Constructs an `InterpError::Unsupported` from a variant of
/// `UnsupportedOpInfo` (the variant name and payload are passed as tokens).
#[macro_export]
macro_rules! err_unsup {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::Unsupported(
            $crate::mir::interpret::UnsupportedOpInfo::$($tt)*
        )
    };
}
/// Shorthand for `err_unsup!(Unsupported(..))` with a `format!`-ed message.
#[macro_export]
macro_rules! err_unsup_format {
    ($($tt:tt)*) => { err_unsup!(Unsupported(format!($($tt)*))) };
}
/// Constructs an `InterpError::InvalidProgram` from a variant of
/// `InvalidProgramInfo`.
#[macro_export]
macro_rules! err_inval {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::InvalidProgram(
            $crate::mir::interpret::InvalidProgramInfo::$($tt)*
        )
    };
}
/// Constructs an `InterpError::UndefinedBehavior` from a variant of
/// `UndefinedBehaviorInfo`.
/// NOTE(review): the `macro_rules! err_ub` header was lost in extraction;
/// the name is inferred from the `err_ub!` call sites elsewhere in this file.
#[macro_export]
macro_rules! err_ub {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::UndefinedBehavior(
            $crate::mir::interpret::UndefinedBehaviorInfo::$($tt)*
        )
    };
}
/// Shorthand for `err_ub!(Ub(..))` with a `format!`-ed message.
#[macro_export]
macro_rules! err_ub_format {
    ($($tt:tt)*) => { err_ub!(Ub(format!($($tt)*))) };
}
/// Constructs an `InterpError::ResourceExhaustion` from a variant of
/// `ResourceExhaustionInfo`.
#[macro_export]
macro_rules! err_exhaust {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::ResourceExhaustion(
            $crate::mir::interpret::ResourceExhaustionInfo::$($tt)*
        )
    };
}
/// Constructs an `InterpError::MachineStop`, boxing the machine-defined
/// payload expression.
#[macro_export]
macro_rules! err_machine_stop {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::MachineStop(Box::new($($tt)*))
    };
}
// In the `throw_*` macros, avoid `return` to make them work with `try {}`.
/// Propagates an `err_unsup!` error via `?` (no `return`, so it works in `try {}`).
#[macro_export]
macro_rules! throw_unsup {
    ($($tt:tt)*) => { Err::<!, _>(err_unsup!($($tt)*))? };
}
/// Propagates an unsupported-operation error with a `format!`-ed message.
#[macro_export]
macro_rules! throw_unsup_format {
    ($($tt:tt)*) => { throw_unsup!(Unsupported(format!($($tt)*))) };
}
/// Propagates an `err_inval!` error via `?`.
#[macro_export]
macro_rules! throw_inval {
    ($($tt:tt)*) => { Err::<!, _>(err_inval!($($tt)*))? };
}
/// Propagates an `err_ub!` error via `?`.
#[macro_export]
macro_rules! throw_ub {
    ($($tt:tt)*) => { Err::<!, _>(err_ub!($($tt)*))? };
}
/// Propagates an undefined-behavior error with a `format!`-ed message.
#[macro_export]
macro_rules! throw_ub_format {
    ($($tt:tt)*) => { throw_ub!(Ub(format!($($tt)*))) };
}
/// Propagates an `err_exhaust!` error via `?`.
#[macro_export]
macro_rules! throw_exhaust {
    ($($tt:tt)*) => { Err::<!, _>(err_exhaust!($($tt)*))? };
}
/// Propagates an `err_machine_stop!` error via `?`.
#[macro_export]
macro_rules! throw_machine_stop {
    ($($tt:tt)*) => { Err::<!, _>(err_machine_stop!($($tt)*))? };
}
98 use std
::convert
::TryFrom
;
101 use std
::num
::NonZeroU32
;
102 use std
::sync
::atomic
::{AtomicU32, Ordering}
;
104 use byteorder
::{BigEndian, LittleEndian, ReadBytesExt, WriteBytesExt}
;
105 use rustc_ast
::ast
::LitKind
;
106 use rustc_data_structures
::fx
::FxHashMap
;
107 use rustc_data_structures
::sync
::{HashMapExt, Lock}
;
108 use rustc_data_structures
::tiny_list
::TinyList
;
109 use rustc_hir
::def_id
::DefId
;
110 use rustc_macros
::HashStable
;
111 use rustc_serialize
::{Decodable, Encodable, Encoder}
;
112 use rustc_target
::abi
::{Endian, Size}
;
115 use crate::ty
::codec
::TyDecoder
;
116 use crate::ty
::subst
::GenericArgKind
;
117 use crate::ty
::{self, Instance, Ty, TyCtxt}
;
119 pub use self::error
::{
120 struct_error
, ConstEvalErr
, ConstEvalRawResult
, ConstEvalResult
, ErrorHandled
, FrameInfo
,
121 InterpError
, InterpErrorInfo
, InterpResult
, InvalidProgramInfo
, MachineStopType
,
122 ResourceExhaustionInfo
, UndefinedBehaviorInfo
, UnsupportedOpInfo
,
125 pub use self::value
::{get_slice_bytes, ConstValue, RawConst, Scalar, ScalarMaybeUndef}
;
127 pub use self::allocation
::{Allocation, AllocationExtra, Relocations, UndefMask}
;
129 pub use self::pointer
::{CheckInAllocMsg, Pointer, PointerArithmetic}
;
131 /// Uniquely identifies one of the following:
134 /// - A const fn where all arguments (if any) are zero-sized types
135 #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, RustcEncodable, RustcDecodable)]
136 #[derive(HashStable, Lift)]
137 pub struct GlobalId
<'tcx
> {
138 /// For a constant or static, the `Instance` of the item itself.
139 /// For a promoted global, the `Instance` of the function they belong to.
140 pub instance
: ty
::Instance
<'tcx
>,
142 /// The index for promoted globals within their function's `mir::Body`.
143 pub promoted
: Option
<mir
::Promoted
>,
146 /// Input argument for `tcx.lit_to_const`.
147 #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, HashStable)]
148 pub struct LitToConstInput
<'tcx
> {
149 /// The absolute value of the resultant constant.
150 pub lit
: &'tcx LitKind
,
151 /// The type of the constant.
153 /// If the constant is negative.
157 /// Error type for `tcx.lit_to_const`.
158 #[derive(Copy, Clone, Debug, Eq, PartialEq, HashStable)]
159 pub enum LitToConstError
{
160 /// The literal's inferred type did not match the expected `ty` in the input.
161 /// This is used for graceful error handling (`delay_span_bug`) in
162 /// type checking (`Const::from_anon_const`).
/// Opaque identifier for a global allocation; see `GlobalAlloc` for what it can refer to.
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct AllocId(pub u64);

// `Debug` deliberately delegates to `Display` so `{:?}` and `{}` both print `alloc<N>`.
impl fmt::Debug for AllocId {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, fmt)
    }
}

impl fmt::Display for AllocId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "alloc{}", self.0)
    }
}
183 impl rustc_serialize
::UseSpecializedEncodable
for AllocId {}
184 impl rustc_serialize
::UseSpecializedDecodable
for AllocId {}
186 #[derive(RustcDecodable, RustcEncodable)]
187 enum AllocDiscriminant
{
193 pub fn specialized_encode_alloc_id
<'tcx
, E
: Encoder
>(
197 ) -> Result
<(), E
::Error
> {
198 let alloc
: GlobalAlloc
<'tcx
> =
199 tcx
.alloc_map
.lock().get(alloc_id
).expect("no value for given alloc ID");
201 GlobalAlloc
::Memory(alloc
) => {
202 trace
!("encoding {:?} with {:#?}", alloc_id
, alloc
);
203 AllocDiscriminant
::Alloc
.encode(encoder
)?
;
204 alloc
.encode(encoder
)?
;
206 GlobalAlloc
::Function(fn_instance
) => {
207 trace
!("encoding {:?} with {:#?}", alloc_id
, fn_instance
);
208 AllocDiscriminant
::Fn
.encode(encoder
)?
;
209 fn_instance
.encode(encoder
)?
;
211 GlobalAlloc
::Static(did
) => {
212 // References to statics doesn't need to know about their allocations,
213 // just about its `DefId`.
214 AllocDiscriminant
::Static
.encode(encoder
)?
;
215 did
.encode(encoder
)?
;
221 // Used to avoid infinite recursion when decoding cyclic allocations.
222 type DecodingSessionId
= NonZeroU32
;
227 InProgressNonAlloc(TinyList
<DecodingSessionId
>),
228 InProgress(TinyList
<DecodingSessionId
>, AllocId
),
232 pub struct AllocDecodingState
{
233 // For each `AllocId`, we keep track of which decoding state it's currently in.
234 decoding_state
: Vec
<Lock
<State
>>,
235 // The offsets of each allocation in the data stream.
236 data_offsets
: Vec
<u32>,
239 impl AllocDecodingState
{
240 pub fn new_decoding_session(&self) -> AllocDecodingSession
<'_
> {
241 static DECODER_SESSION_ID
: AtomicU32
= AtomicU32
::new(0);
242 let counter
= DECODER_SESSION_ID
.fetch_add(1, Ordering
::SeqCst
);
244 // Make sure this is never zero.
245 let session_id
= DecodingSessionId
::new((counter
& 0x7FFFFFFF) + 1).unwrap();
247 AllocDecodingSession { state: self, session_id }
250 pub fn new(data_offsets
: Vec
<u32>) -> Self {
251 let decoding_state
= vec
![Lock
::new(State
::Empty
); data_offsets
.len()];
253 Self { decoding_state, data_offsets }
257 #[derive(Copy, Clone)]
258 pub struct AllocDecodingSession
<'s
> {
259 state
: &'s AllocDecodingState
,
260 session_id
: DecodingSessionId
,
263 impl<'s
> AllocDecodingSession
<'s
> {
264 /// Decodes an `AllocId` in a thread-safe way.
265 pub fn decode_alloc_id
<D
>(&self, decoder
: &mut D
) -> Result
<AllocId
, D
::Error
>
269 // Read the index of the allocation.
270 let idx
= usize::try_from(decoder
.read_u32()?
).unwrap();
271 let pos
= usize::try_from(self.state
.data_offsets
[idx
]).unwrap();
273 // Decode the `AllocDiscriminant` now so that we know if we have to reserve an
275 let (alloc_kind
, pos
) = decoder
.with_position(pos
, |decoder
| {
276 let alloc_kind
= AllocDiscriminant
::decode(decoder
)?
;
277 Ok((alloc_kind
, decoder
.position()))
280 // Check the decoding state to see if it's already decoded or if we should
283 let mut entry
= self.state
.decoding_state
[idx
].lock();
286 State
::Done(alloc_id
) => {
289 ref mut entry @ State
::Empty
=> {
290 // We are allowed to decode.
292 AllocDiscriminant
::Alloc
=> {
293 // If this is an allocation, we need to reserve an
294 // `AllocId` so we can decode cyclic graphs.
295 let alloc_id
= decoder
.tcx().alloc_map
.lock().reserve();
297 State
::InProgress(TinyList
::new_single(self.session_id
), alloc_id
);
300 AllocDiscriminant
::Fn
| AllocDiscriminant
::Static
=> {
301 // Fns and statics cannot be cyclic, and their `AllocId`
302 // is determined later by interning.
304 State
::InProgressNonAlloc(TinyList
::new_single(self.session_id
));
309 State
::InProgressNonAlloc(ref mut sessions
) => {
310 if sessions
.contains(&self.session_id
) {
311 bug
!("this should be unreachable");
313 // Start decoding concurrently.
314 sessions
.insert(self.session_id
);
318 State
::InProgress(ref mut sessions
, alloc_id
) => {
319 if sessions
.contains(&self.session_id
) {
323 // Start decoding concurrently.
324 sessions
.insert(self.session_id
);
331 // Now decode the actual data.
332 let alloc_id
= decoder
.with_position(pos
, |decoder
| {
334 AllocDiscriminant
::Alloc
=> {
335 let alloc
= <&'tcx Allocation
as Decodable
>::decode(decoder
)?
;
336 // We already have a reserved `AllocId`.
337 let alloc_id
= alloc_id
.unwrap();
338 trace
!("decoded alloc {:?}: {:#?}", alloc_id
, alloc
);
339 decoder
.tcx().alloc_map
.lock().set_alloc_id_same_memory(alloc_id
, alloc
);
342 AllocDiscriminant
::Fn
=> {
343 assert
!(alloc_id
.is_none());
344 trace
!("creating fn alloc ID");
345 let instance
= ty
::Instance
::decode(decoder
)?
;
346 trace
!("decoded fn alloc instance: {:?}", instance
);
347 let alloc_id
= decoder
.tcx().alloc_map
.lock().create_fn_alloc(instance
);
350 AllocDiscriminant
::Static
=> {
351 assert
!(alloc_id
.is_none());
352 trace
!("creating extern static alloc ID");
353 let did
= DefId
::decode(decoder
)?
;
354 trace
!("decoded static def-ID: {:?}", did
);
355 let alloc_id
= decoder
.tcx().alloc_map
.lock().create_static_alloc(did
);
361 self.state
.decoding_state
[idx
].with_lock(|entry
| {
362 *entry
= State
::Done(alloc_id
);
369 /// An allocation in the global (tcx-managed) memory can be either a function pointer,
370 /// a static, or a "real" allocation with some data in it.
371 #[derive(Debug, Clone, Eq, PartialEq, Hash, RustcDecodable, RustcEncodable, HashStable)]
372 pub enum GlobalAlloc
<'tcx
> {
373 /// The alloc ID is used as a function pointer.
374 Function(Instance
<'tcx
>),
375 /// The alloc ID points to a "lazy" static variable that did not get computed (yet).
376 /// This is also used to break the cycle in recursive statics.
378 /// The alloc ID points to memory.
379 Memory(&'tcx Allocation
),
382 pub struct AllocMap
<'tcx
> {
383 /// Maps `AllocId`s to their corresponding allocations.
384 alloc_map
: FxHashMap
<AllocId
, GlobalAlloc
<'tcx
>>,
386 /// Used to ensure that statics and functions only get one associated `AllocId`.
387 /// Should never contain a `GlobalAlloc::Memory`!
389 // FIXME: Should we just have two separate dedup maps for statics and functions each?
390 dedup
: FxHashMap
<GlobalAlloc
<'tcx
>, AllocId
>,
392 /// The `AllocId` to assign to the next requested ID.
393 /// Always incremented; never gets smaller.
397 impl<'tcx
> AllocMap
<'tcx
> {
398 pub fn new() -> Self {
399 AllocMap { alloc_map: Default::default(), dedup: Default::default(), next_id: AllocId(0) }
402 /// Obtains a new allocation ID that can be referenced but does not
403 /// yet have an allocation backing it.
405 /// Make sure to call `set_alloc_id_memory` or `set_alloc_id_same_memory` before returning such
406 /// an `AllocId` from a query.
407 pub fn reserve(&mut self) -> AllocId
{
408 let next
= self.next_id
;
409 self.next_id
.0 = self.next_id
.0.checked_add(1).expect(
410 "You overflowed a u64 by incrementing by 1... \
411 You've just earned yourself a free drink if we ever meet. \
412 Seriously, how did you do that?!",
417 /// Reserves a new ID *if* this allocation has not been dedup-reserved before.
418 /// Should only be used for function pointers and statics, we don't want
419 /// to dedup IDs for "real" memory!
420 fn reserve_and_set_dedup(&mut self, alloc
: GlobalAlloc
<'tcx
>) -> AllocId
{
422 GlobalAlloc
::Function(..) | GlobalAlloc
::Static(..) => {}
423 GlobalAlloc
::Memory(..) => bug
!("Trying to dedup-reserve memory with real data!"),
425 if let Some(&alloc_id
) = self.dedup
.get(&alloc
) {
428 let id
= self.reserve();
429 debug
!("creating alloc {:?} with id {}", alloc
, id
);
430 self.alloc_map
.insert(id
, alloc
.clone());
431 self.dedup
.insert(alloc
, id
);
435 /// Generates an `AllocId` for a static or return a cached one in case this function has been
436 /// called on the same static before.
437 pub fn create_static_alloc(&mut self, static_id
: DefId
) -> AllocId
{
438 self.reserve_and_set_dedup(GlobalAlloc
::Static(static_id
))
441 /// Generates an `AllocId` for a function. Depending on the function type,
442 /// this might get deduplicated or assigned a new ID each time.
443 pub fn create_fn_alloc(&mut self, instance
: Instance
<'tcx
>) -> AllocId
{
444 // Functions cannot be identified by pointers, as asm-equal functions can get deduplicated
445 // by the linker (we set the "unnamed_addr" attribute for LLVM) and functions can be
446 // duplicated across crates.
447 // We thus generate a new `AllocId` for every mention of a function. This means that
448 // `main as fn() == main as fn()` is false, while `let x = main as fn(); x == x` is true.
449 // However, formatting code relies on function identity (see #58320), so we only do
450 // this for generic functions. Lifetime parameters are ignored.
451 let is_generic
= instance
.substs
.into_iter().any(|kind
| match kind
.unpack() {
452 GenericArgKind
::Lifetime(_
) => false,
457 let id
= self.reserve();
458 self.alloc_map
.insert(id
, GlobalAlloc
::Function(instance
));
462 self.reserve_and_set_dedup(GlobalAlloc
::Function(instance
))
466 /// Interns the `Allocation` and return a new `AllocId`, even if there's already an identical
467 /// `Allocation` with a different `AllocId`.
468 /// Statics with identical content will still point to the same `Allocation`, i.e.,
469 /// their data will be deduplicated through `Allocation` interning -- but they
470 /// are different places in memory and as such need different IDs.
471 pub fn create_memory_alloc(&mut self, mem
: &'tcx Allocation
) -> AllocId
{
472 let id
= self.reserve();
473 self.set_alloc_id_memory(id
, mem
);
477 /// Returns `None` in case the `AllocId` is dangling. An `InterpretCx` can still have a
478 /// local `Allocation` for that `AllocId`, but having such an `AllocId` in a constant is
479 /// illegal and will likely ICE.
480 /// This function exists to allow const eval to detect the difference between evaluation-
481 /// local dangling pointers and allocations in constants/statics.
483 pub fn get(&self, id
: AllocId
) -> Option
<GlobalAlloc
<'tcx
>> {
484 self.alloc_map
.get(&id
).cloned()
487 /// Panics if the `AllocId` does not refer to an `Allocation`
488 pub fn unwrap_memory(&self, id
: AllocId
) -> &'tcx Allocation
{
490 Some(GlobalAlloc
::Memory(mem
)) => mem
,
491 _
=> bug
!("expected allocation ID {} to point to memory", id
),
495 /// Panics if the `AllocId` does not refer to a function
496 pub fn unwrap_fn(&self, id
: AllocId
) -> Instance
<'tcx
> {
498 Some(GlobalAlloc
::Function(instance
)) => instance
,
499 _
=> bug
!("expected allocation ID {} to point to a function", id
),
503 /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to
504 /// call this function twice, even with the same `Allocation` will ICE the compiler.
505 pub fn set_alloc_id_memory(&mut self, id
: AllocId
, mem
: &'tcx Allocation
) {
506 if let Some(old
) = self.alloc_map
.insert(id
, GlobalAlloc
::Memory(mem
)) {
507 bug
!("tried to set allocation ID {}, but it was already existing as {:#?}", id
, old
);
511 /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. May be called
512 /// twice for the same `(AllocId, Allocation)` pair.
513 fn set_alloc_id_same_memory(&mut self, id
: AllocId
, mem
: &'tcx Allocation
) {
514 self.alloc_map
.insert_same(id
, GlobalAlloc
::Memory(mem
));
////////////////////////////////////////////////////////////////////////////////
// Methods to access integers in the target endianness
////////////////////////////////////////////////////////////////////////////////
523 pub fn write_target_uint(
525 mut target
: &mut [u8],
527 ) -> Result
<(), io
::Error
> {
528 let len
= target
.len();
530 Endian
::Little
=> target
.write_uint128
::<LittleEndian
>(data
, len
),
531 Endian
::Big
=> target
.write_uint128
::<BigEndian
>(data
, len
),
536 pub fn read_target_uint(endianness
: Endian
, mut source
: &[u8]) -> Result
<u128
, io
::Error
> {
538 Endian
::Little
=> source
.read_uint128
::<LittleEndian
>(source
.len()),
539 Endian
::Big
=> source
.read_uint128
::<BigEndian
>(source
.len()),
////////////////////////////////////////////////////////////////////////////////
// Methods to facilitate working with signed integers stored in a u128
////////////////////////////////////////////////////////////////////////////////
547 /// Truncates `value` to `size` bits and then sign-extend it to 128 bits
548 /// (i.e., if it is negative, fill with 1's on the left).
550 pub fn sign_extend(value
: u128
, size
: Size
) -> u128
{
551 let size
= size
.bits();
553 // Truncated until nothing is left.
557 let shift
= 128 - size
;
558 // Shift the unsigned value to the left, then shift back to the right as signed
559 // (essentially fills with FF on the left).
560 (((value
<< shift
) as i128
) >> shift
) as u128
563 /// Truncates `value` to `size` bits.
565 pub fn truncate(value
: u128
, size
: Size
) -> u128
{
566 let size
= size
.bits();
568 // Truncated until nothing is left.
571 let shift
= 128 - size
;
572 // Truncate (shift left to drop out leftover values, shift right to fill with zeroes).
573 (value
<< shift
) >> shift