]> git.proxmox.com Git - rustc.git/blob - src/librustc_middle/mir/interpret/mod.rs
New upstream version 1.44.1+dfsg1
[rustc.git] / src / librustc_middle / mir / interpret / mod.rs
1 //! An interpreter for MIR used in CTFE and by miri.
2
/// Constructs an `InterpError::Unsupported` from the given
/// `UnsupportedOpInfo` variant (and its arguments).
#[macro_export]
macro_rules! err_unsup {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::Unsupported(
            $crate::mir::interpret::UnsupportedOpInfo::$($tt)*
        )
    };
}
11
/// Like `err_unsup!`, but takes `format!` arguments and wraps the resulting
/// message in the catch-all `Unsupported(_)` variant.
#[macro_export]
macro_rules! err_unsup_format {
    ($($tt:tt)*) => { err_unsup!(Unsupported(format!($($tt)*))) };
}
16
/// Constructs an `InterpError::InvalidProgram` from the given
/// `InvalidProgramInfo` variant (and its arguments).
#[macro_export]
macro_rules! err_inval {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::InvalidProgram(
            $crate::mir::interpret::InvalidProgramInfo::$($tt)*
        )
    };
}
25
/// Constructs an `InterpError::UndefinedBehavior` from the given
/// `UndefinedBehaviorInfo` variant (and its arguments).
#[macro_export]
macro_rules! err_ub {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::UndefinedBehavior(
            $crate::mir::interpret::UndefinedBehaviorInfo::$($tt)*
        )
    };
}
34
/// Like `err_ub!`, but takes `format!` arguments and wraps the resulting
/// message in the catch-all `Ub(_)` variant.
#[macro_export]
macro_rules! err_ub_format {
    ($($tt:tt)*) => { err_ub!(Ub(format!($($tt)*))) };
}
39
/// Constructs an `InterpError::ResourceExhaustion` from the given
/// `ResourceExhaustionInfo` variant (and its arguments).
#[macro_export]
macro_rules! err_exhaust {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::ResourceExhaustion(
            $crate::mir::interpret::ResourceExhaustionInfo::$($tt)*
        )
    };
}
48
/// Constructs an `InterpError::MachineStop`, boxing the given payload.
#[macro_export]
macro_rules! err_machine_stop {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::MachineStop(Box::new($($tt)*))
    };
}
55
// In the `throw_*` macros, avoid `return` to make them work with `try {}`.
/// Returns early (via `?` on an `Err`) with an `Unsupported` interpreter error.
#[macro_export]
macro_rules! throw_unsup {
    ($($tt:tt)*) => { Err::<!, _>(err_unsup!($($tt)*))? };
}
61
/// Like `throw_unsup!`, but takes `format!` arguments for the
/// catch-all `Unsupported(_)` variant.
#[macro_export]
macro_rules! throw_unsup_format {
    ($($tt:tt)*) => { throw_unsup!(Unsupported(format!($($tt)*))) };
}
66
/// Returns early (via `?` on an `Err`) with an `InvalidProgram` interpreter error.
#[macro_export]
macro_rules! throw_inval {
    ($($tt:tt)*) => { Err::<!, _>(err_inval!($($tt)*))? };
}
71
/// Returns early (via `?` on an `Err`) with an `UndefinedBehavior` interpreter error.
#[macro_export]
macro_rules! throw_ub {
    ($($tt:tt)*) => { Err::<!, _>(err_ub!($($tt)*))? };
}
76
/// Like `throw_ub!`, but takes `format!` arguments for the
/// catch-all `Ub(_)` variant.
#[macro_export]
macro_rules! throw_ub_format {
    ($($tt:tt)*) => { throw_ub!(Ub(format!($($tt)*))) };
}
81
/// Returns early (via `?` on an `Err`) with a `ResourceExhaustion` interpreter error.
#[macro_export]
macro_rules! throw_exhaust {
    ($($tt:tt)*) => { Err::<!, _>(err_exhaust!($($tt)*))? };
}
86
/// Returns early (via `?` on an `Err`) with a boxed `MachineStop` payload.
#[macro_export]
macro_rules! throw_machine_stop {
    ($($tt:tt)*) => { Err::<!, _>(err_machine_stop!($($tt)*))? };
}
91
92 mod allocation;
93 mod error;
94 mod pointer;
95 mod queries;
96 mod value;
97
98 use std::convert::TryFrom;
99 use std::fmt;
100 use std::io;
101 use std::num::NonZeroU32;
102 use std::sync::atomic::{AtomicU32, Ordering};
103
104 use byteorder::{BigEndian, LittleEndian, ReadBytesExt, WriteBytesExt};
105 use rustc_ast::ast::LitKind;
106 use rustc_data_structures::fx::FxHashMap;
107 use rustc_data_structures::sync::{HashMapExt, Lock};
108 use rustc_data_structures::tiny_list::TinyList;
109 use rustc_hir::def_id::DefId;
110 use rustc_macros::HashStable;
111 use rustc_serialize::{Decodable, Encodable, Encoder};
112 use rustc_target::abi::{Endian, Size};
113
114 use crate::mir;
115 use crate::ty::codec::TyDecoder;
116 use crate::ty::subst::GenericArgKind;
117 use crate::ty::{self, Instance, Ty, TyCtxt};
118
119 pub use self::error::{
120 struct_error, ConstEvalErr, ConstEvalRawResult, ConstEvalResult, ErrorHandled, FrameInfo,
121 InterpError, InterpErrorInfo, InterpResult, InvalidProgramInfo, MachineStopType,
122 ResourceExhaustionInfo, UndefinedBehaviorInfo, UnsupportedOpInfo,
123 };
124
125 pub use self::value::{get_slice_bytes, ConstValue, RawConst, Scalar, ScalarMaybeUndef};
126
127 pub use self::allocation::{Allocation, AllocationExtra, Relocations, UndefMask};
128
129 pub use self::pointer::{CheckInAllocMsg, Pointer, PointerArithmetic};
130
/// Uniquely identifies one of the following:
/// - A constant
/// - A static
/// - A const fn where all arguments (if any) are zero-sized types
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, RustcEncodable, RustcDecodable)]
#[derive(HashStable, Lift)]
pub struct GlobalId<'tcx> {
    /// For a constant or static, the `Instance` of the item itself.
    /// For a promoted global, the `Instance` of the function they belong to.
    pub instance: ty::Instance<'tcx>,

    /// The index for promoted globals within their function's `mir::Body`.
    /// `None` if this is not a promoted.
    pub promoted: Option<mir::Promoted>,
}
145
/// Input argument for `tcx.lit_to_const`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, HashStable)]
pub struct LitToConstInput<'tcx> {
    /// The absolute value of the resultant constant
    /// (the sign is carried separately in `neg` below).
    pub lit: &'tcx LitKind,
    /// The type of the constant.
    pub ty: Ty<'tcx>,
    /// If the constant is negative.
    pub neg: bool,
}
156
/// Error type for `tcx.lit_to_const`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, HashStable)]
pub enum LitToConstError {
    /// The literal's inferred type did not match the expected `ty` in the input.
    /// This is used for graceful error handling (`delay_span_bug`) in
    /// type checking (`Const::from_anon_const`).
    TypeError,
    /// The literal was a float that could not be parsed into the target float type.
    UnparseableFloat,
    // NOTE(review): name suggests a diagnostic was already emitted elsewhere,
    // so callers need not report again — confirm against the `lit_to_const` impl.
    Reported,
}
167
/// An opaque identifier for a global allocation; rendered as `alloc{N}`
/// by the `Display`/`Debug` impls below.
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct AllocId(pub u64);
170
171 impl fmt::Debug for AllocId {
172 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
173 fmt::Display::fmt(self, fmt)
174 }
175 }
176
177 impl fmt::Display for AllocId {
178 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
179 write!(f, "alloc{}", self.0)
180 }
181 }
182
// `AllocId`s are not encoded via the derive machinery: encoding goes through
// `specialized_encode_alloc_id` and decoding through `AllocDecodingSession` below.
impl rustc_serialize::UseSpecializedEncodable for AllocId {}
impl rustc_serialize::UseSpecializedDecodable for AllocId {}
185
/// Tag written before each encoded allocation, distinguishing the three
/// `GlobalAlloc` kinds in the data stream.
#[derive(RustcDecodable, RustcEncodable)]
enum AllocDiscriminant {
    Alloc,
    Fn,
    Static,
}
192
/// Encodes the `GlobalAlloc` behind `alloc_id` as an `AllocDiscriminant`
/// followed by its payload (the allocation, the fn `Instance`, or the
/// static's `DefId`).
///
/// Panics if `alloc_id` is dangling, i.e., has no entry in the global alloc map.
pub fn specialized_encode_alloc_id<'tcx, E: Encoder>(
    encoder: &mut E,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
) -> Result<(), E::Error> {
    let alloc: GlobalAlloc<'tcx> =
        tcx.alloc_map.lock().get(alloc_id).expect("no value for given alloc ID");
    match alloc {
        GlobalAlloc::Memory(alloc) => {
            trace!("encoding {:?} with {:#?}", alloc_id, alloc);
            AllocDiscriminant::Alloc.encode(encoder)?;
            alloc.encode(encoder)?;
        }
        GlobalAlloc::Function(fn_instance) => {
            trace!("encoding {:?} with {:#?}", alloc_id, fn_instance);
            AllocDiscriminant::Fn.encode(encoder)?;
            fn_instance.encode(encoder)?;
        }
        GlobalAlloc::Static(did) => {
            // References to statics don't need to know about their allocations,
            // only about the static's `DefId`.
            AllocDiscriminant::Static.encode(encoder)?;
            did.encode(encoder)?;
        }
    }
    Ok(())
}
220
// Used to avoid infinite recursion when decoding cyclic allocations.
// Always nonzero; see `AllocDecodingState::new_decoding_session` for how IDs are made.
type DecodingSessionId = NonZeroU32;
223
/// Per-allocation decoding state; see `AllocDecodingSession::decode_alloc_id`.
#[derive(Clone)]
enum State {
    /// Not decoded yet.
    Empty,
    /// A fn or static is currently being decoded by the listed sessions.
    InProgressNonAlloc(TinyList<DecodingSessionId>),
    /// A memory allocation is being decoded; its `AllocId` is already reserved.
    InProgress(TinyList<DecodingSessionId>, AllocId),
    /// Fully decoded; the `AllocId` is final.
    Done(AllocId),
}
231
/// Shared state for decoding the `AllocId`s of one encoded data stream,
/// potentially across several concurrent decoding sessions.
pub struct AllocDecodingState {
    // For each `AllocId`, we keep track of which decoding state it's currently in.
    decoding_state: Vec<Lock<State>>,
    // The offsets of each allocation in the data stream.
    data_offsets: Vec<u32>,
}
238
239 impl AllocDecodingState {
240 pub fn new_decoding_session(&self) -> AllocDecodingSession<'_> {
241 static DECODER_SESSION_ID: AtomicU32 = AtomicU32::new(0);
242 let counter = DECODER_SESSION_ID.fetch_add(1, Ordering::SeqCst);
243
244 // Make sure this is never zero.
245 let session_id = DecodingSessionId::new((counter & 0x7FFFFFFF) + 1).unwrap();
246
247 AllocDecodingSession { state: self, session_id }
248 }
249
250 pub fn new(data_offsets: Vec<u32>) -> Self {
251 let decoding_state = vec![Lock::new(State::Empty); data_offsets.len()];
252
253 Self { decoding_state, data_offsets }
254 }
255 }
256
/// A single decoding session; obtained from
/// `AllocDecodingState::new_decoding_session`. Cheap to copy.
#[derive(Copy, Clone)]
pub struct AllocDecodingSession<'s> {
    state: &'s AllocDecodingState,
    session_id: DecodingSessionId,
}
262
impl<'s> AllocDecodingSession<'s> {
    /// Decodes an `AllocId` in a thread-safe way.
    ///
    /// Cyclic allocation graphs are handled by reserving the `AllocId` up front
    /// and recording the in-progress state per session; a re-entrant decode of
    /// the same allocation within the same session returns the reserved ID
    /// instead of recursing.
    pub fn decode_alloc_id<D>(&self, decoder: &mut D) -> Result<AllocId, D::Error>
    where
        D: TyDecoder<'tcx>,
    {
        // Read the index of the allocation.
        let idx = usize::try_from(decoder.read_u32()?).unwrap();
        let pos = usize::try_from(self.state.data_offsets[idx]).unwrap();

        // Decode the `AllocDiscriminant` now so that we know if we have to reserve an
        // `AllocId`. `pos` is re-bound to just past the discriminant, where the
        // payload starts.
        let (alloc_kind, pos) = decoder.with_position(pos, |decoder| {
            let alloc_kind = AllocDiscriminant::decode(decoder)?;
            Ok((alloc_kind, decoder.position()))
        })?;

        // Check the decoding state to see if it's already decoded or if we should
        // decode it here. `Some(..)` means a reserved ID exists for this allocation.
        let alloc_id = {
            let mut entry = self.state.decoding_state[idx].lock();

            match *entry {
                State::Done(alloc_id) => {
                    return Ok(alloc_id);
                }
                ref mut entry @ State::Empty => {
                    // We are allowed to decode.
                    match alloc_kind {
                        AllocDiscriminant::Alloc => {
                            // If this is an allocation, we need to reserve an
                            // `AllocId` so we can decode cyclic graphs.
                            let alloc_id = decoder.tcx().alloc_map.lock().reserve();
                            *entry =
                                State::InProgress(TinyList::new_single(self.session_id), alloc_id);
                            Some(alloc_id)
                        }
                        AllocDiscriminant::Fn | AllocDiscriminant::Static => {
                            // Fns and statics cannot be cyclic, and their `AllocId`
                            // is determined later by interning.
                            *entry =
                                State::InProgressNonAlloc(TinyList::new_single(self.session_id));
                            None
                        }
                    }
                }
                State::InProgressNonAlloc(ref mut sessions) => {
                    if sessions.contains(&self.session_id) {
                        // Fns and statics cannot be cyclic, so the same session
                        // should never re-enter decoding one of them.
                        bug!("this should be unreachable");
                    } else {
                        // Start decoding concurrently.
                        sessions.insert(self.session_id);
                        None
                    }
                }
                State::InProgress(ref mut sessions, alloc_id) => {
                    if sessions.contains(&self.session_id) {
                        // Don't recurse.
                        return Ok(alloc_id);
                    } else {
                        // Start decoding concurrently.
                        sessions.insert(self.session_id);
                        Some(alloc_id)
                    }
                }
            }
        };

        // Now decode the actual data (the lock is *not* held while doing so).
        let alloc_id = decoder.with_position(pos, |decoder| {
            match alloc_kind {
                AllocDiscriminant::Alloc => {
                    let alloc = <&'tcx Allocation as Decodable>::decode(decoder)?;
                    // We already have a reserved `AllocId`.
                    let alloc_id = alloc_id.unwrap();
                    trace!("decoded alloc {:?}: {:#?}", alloc_id, alloc);
                    decoder.tcx().alloc_map.lock().set_alloc_id_same_memory(alloc_id, alloc);
                    Ok(alloc_id)
                }
                AllocDiscriminant::Fn => {
                    assert!(alloc_id.is_none());
                    trace!("creating fn alloc ID");
                    let instance = ty::Instance::decode(decoder)?;
                    trace!("decoded fn alloc instance: {:?}", instance);
                    let alloc_id = decoder.tcx().alloc_map.lock().create_fn_alloc(instance);
                    Ok(alloc_id)
                }
                AllocDiscriminant::Static => {
                    assert!(alloc_id.is_none());
                    trace!("creating extern static alloc ID");
                    let did = DefId::decode(decoder)?;
                    trace!("decoded static def-ID: {:?}", did);
                    let alloc_id = decoder.tcx().alloc_map.lock().create_static_alloc(did);
                    Ok(alloc_id)
                }
            }
        })?;

        // Publish the final ID so later lookups hit the `Done` fast path.
        self.state.decoding_state[idx].with_lock(|entry| {
            *entry = State::Done(alloc_id);
        });

        Ok(alloc_id)
    }
}
368
/// An allocation in the global (tcx-managed) memory can be either a function pointer,
/// a static, or a "real" allocation with some data in it.
///
/// `Eq`/`Hash` are derived so that fn/static entries can be deduplicated via
/// `AllocMap::dedup`.
#[derive(Debug, Clone, Eq, PartialEq, Hash, RustcDecodable, RustcEncodable, HashStable)]
pub enum GlobalAlloc<'tcx> {
    /// The alloc ID is used as a function pointer.
    Function(Instance<'tcx>),
    /// The alloc ID points to a "lazy" static variable that did not get computed (yet).
    /// This is also used to break the cycle in recursive statics.
    Static(DefId),
    /// The alloc ID points to memory.
    Memory(&'tcx Allocation),
}
381
/// The global two-way mapping between `AllocId`s and what they refer to,
/// accessed here through `tcx.alloc_map`.
pub struct AllocMap<'tcx> {
    /// Maps `AllocId`s to their corresponding allocations.
    alloc_map: FxHashMap<AllocId, GlobalAlloc<'tcx>>,

    /// Used to ensure that statics and functions only get one associated `AllocId`.
    /// Should never contain a `GlobalAlloc::Memory`!
    //
    // FIXME: Should we just have two separate dedup maps for statics and functions each?
    dedup: FxHashMap<GlobalAlloc<'tcx>, AllocId>,

    /// The `AllocId` to assign to the next requested ID.
    /// Always incremented; never gets smaller.
    next_id: AllocId,
}
396
397 impl<'tcx> AllocMap<'tcx> {
398 pub fn new() -> Self {
399 AllocMap { alloc_map: Default::default(), dedup: Default::default(), next_id: AllocId(0) }
400 }
401
402 /// Obtains a new allocation ID that can be referenced but does not
403 /// yet have an allocation backing it.
404 ///
405 /// Make sure to call `set_alloc_id_memory` or `set_alloc_id_same_memory` before returning such
406 /// an `AllocId` from a query.
407 pub fn reserve(&mut self) -> AllocId {
408 let next = self.next_id;
409 self.next_id.0 = self.next_id.0.checked_add(1).expect(
410 "You overflowed a u64 by incrementing by 1... \
411 You've just earned yourself a free drink if we ever meet. \
412 Seriously, how did you do that?!",
413 );
414 next
415 }
416
417 /// Reserves a new ID *if* this allocation has not been dedup-reserved before.
418 /// Should only be used for function pointers and statics, we don't want
419 /// to dedup IDs for "real" memory!
420 fn reserve_and_set_dedup(&mut self, alloc: GlobalAlloc<'tcx>) -> AllocId {
421 match alloc {
422 GlobalAlloc::Function(..) | GlobalAlloc::Static(..) => {}
423 GlobalAlloc::Memory(..) => bug!("Trying to dedup-reserve memory with real data!"),
424 }
425 if let Some(&alloc_id) = self.dedup.get(&alloc) {
426 return alloc_id;
427 }
428 let id = self.reserve();
429 debug!("creating alloc {:?} with id {}", alloc, id);
430 self.alloc_map.insert(id, alloc.clone());
431 self.dedup.insert(alloc, id);
432 id
433 }
434
435 /// Generates an `AllocId` for a static or return a cached one in case this function has been
436 /// called on the same static before.
437 pub fn create_static_alloc(&mut self, static_id: DefId) -> AllocId {
438 self.reserve_and_set_dedup(GlobalAlloc::Static(static_id))
439 }
440
441 /// Generates an `AllocId` for a function. Depending on the function type,
442 /// this might get deduplicated or assigned a new ID each time.
443 pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> AllocId {
444 // Functions cannot be identified by pointers, as asm-equal functions can get deduplicated
445 // by the linker (we set the "unnamed_addr" attribute for LLVM) and functions can be
446 // duplicated across crates.
447 // We thus generate a new `AllocId` for every mention of a function. This means that
448 // `main as fn() == main as fn()` is false, while `let x = main as fn(); x == x` is true.
449 // However, formatting code relies on function identity (see #58320), so we only do
450 // this for generic functions. Lifetime parameters are ignored.
451 let is_generic = instance.substs.into_iter().any(|kind| match kind.unpack() {
452 GenericArgKind::Lifetime(_) => false,
453 _ => true,
454 });
455 if is_generic {
456 // Get a fresh ID.
457 let id = self.reserve();
458 self.alloc_map.insert(id, GlobalAlloc::Function(instance));
459 id
460 } else {
461 // Deduplicate.
462 self.reserve_and_set_dedup(GlobalAlloc::Function(instance))
463 }
464 }
465
466 /// Interns the `Allocation` and return a new `AllocId`, even if there's already an identical
467 /// `Allocation` with a different `AllocId`.
468 /// Statics with identical content will still point to the same `Allocation`, i.e.,
469 /// their data will be deduplicated through `Allocation` interning -- but they
470 /// are different places in memory and as such need different IDs.
471 pub fn create_memory_alloc(&mut self, mem: &'tcx Allocation) -> AllocId {
472 let id = self.reserve();
473 self.set_alloc_id_memory(id, mem);
474 id
475 }
476
477 /// Returns `None` in case the `AllocId` is dangling. An `InterpretCx` can still have a
478 /// local `Allocation` for that `AllocId`, but having such an `AllocId` in a constant is
479 /// illegal and will likely ICE.
480 /// This function exists to allow const eval to detect the difference between evaluation-
481 /// local dangling pointers and allocations in constants/statics.
482 #[inline]
483 pub fn get(&self, id: AllocId) -> Option<GlobalAlloc<'tcx>> {
484 self.alloc_map.get(&id).cloned()
485 }
486
487 /// Panics if the `AllocId` does not refer to an `Allocation`
488 pub fn unwrap_memory(&self, id: AllocId) -> &'tcx Allocation {
489 match self.get(id) {
490 Some(GlobalAlloc::Memory(mem)) => mem,
491 _ => bug!("expected allocation ID {} to point to memory", id),
492 }
493 }
494
495 /// Panics if the `AllocId` does not refer to a function
496 pub fn unwrap_fn(&self, id: AllocId) -> Instance<'tcx> {
497 match self.get(id) {
498 Some(GlobalAlloc::Function(instance)) => instance,
499 _ => bug!("expected allocation ID {} to point to a function", id),
500 }
501 }
502
503 /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to
504 /// call this function twice, even with the same `Allocation` will ICE the compiler.
505 pub fn set_alloc_id_memory(&mut self, id: AllocId, mem: &'tcx Allocation) {
506 if let Some(old) = self.alloc_map.insert(id, GlobalAlloc::Memory(mem)) {
507 bug!("tried to set allocation ID {}, but it was already existing as {:#?}", id, old);
508 }
509 }
510
511 /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. May be called
512 /// twice for the same `(AllocId, Allocation)` pair.
513 fn set_alloc_id_same_memory(&mut self, id: AllocId, mem: &'tcx Allocation) {
514 self.alloc_map.insert_same(id, GlobalAlloc::Memory(mem));
515 }
516 }
517
518 ////////////////////////////////////////////////////////////////////////////////
519 // Methods to access integers in the target endianness
520 ////////////////////////////////////////////////////////////////////////////////
521
522 #[inline]
523 pub fn write_target_uint(
524 endianness: Endian,
525 mut target: &mut [u8],
526 data: u128,
527 ) -> Result<(), io::Error> {
528 let len = target.len();
529 match endianness {
530 Endian::Little => target.write_uint128::<LittleEndian>(data, len),
531 Endian::Big => target.write_uint128::<BigEndian>(data, len),
532 }
533 }
534
535 #[inline]
536 pub fn read_target_uint(endianness: Endian, mut source: &[u8]) -> Result<u128, io::Error> {
537 match endianness {
538 Endian::Little => source.read_uint128::<LittleEndian>(source.len()),
539 Endian::Big => source.read_uint128::<BigEndian>(source.len()),
540 }
541 }
542
543 ////////////////////////////////////////////////////////////////////////////////
544 // Methods to facilitate working with signed integers stored in a u128
545 ////////////////////////////////////////////////////////////////////////////////
546
547 /// Truncates `value` to `size` bits and then sign-extend it to 128 bits
548 /// (i.e., if it is negative, fill with 1's on the left).
549 #[inline]
550 pub fn sign_extend(value: u128, size: Size) -> u128 {
551 let size = size.bits();
552 if size == 0 {
553 // Truncated until nothing is left.
554 return 0;
555 }
556 // Sign-extend it.
557 let shift = 128 - size;
558 // Shift the unsigned value to the left, then shift back to the right as signed
559 // (essentially fills with FF on the left).
560 (((value << shift) as i128) >> shift) as u128
561 }
562
563 /// Truncates `value` to `size` bits.
564 #[inline]
565 pub fn truncate(value: u128, size: Size) -> u128 {
566 let size = size.bits();
567 if size == 0 {
568 // Truncated until nothing is left.
569 return 0;
570 }
571 let shift = 128 - size;
572 // Truncate (shift left to drop out leftover values, shift right to fill with zeroes).
573 (value << shift) >> shift
574 }