//! An interpreter for MIR used in CTFE and by miri.

#[macro_export]
macro_rules! err_unsup {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::Unsupported(
            $crate::mir::interpret::UnsupportedOpInfo::$($tt)*
        )
    };
}

#[macro_export]
macro_rules! err_unsup_format {
    ($($tt:tt)*) => { err_unsup!(Unsupported(format!($($tt)*))) };
}

#[macro_export]
macro_rules! err_inval {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::InvalidProgram(
            $crate::mir::interpret::InvalidProgramInfo::$($tt)*
        )
    };
}

#[macro_export]
macro_rules! err_ub {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::UndefinedBehavior(
            $crate::mir::interpret::UndefinedBehaviorInfo::$($tt)*
        )
    };
}

#[macro_export]
macro_rules! err_ub_format {
    ($($tt:tt)*) => { err_ub!(Ub(format!($($tt)*))) };
}

#[macro_export]
macro_rules! err_exhaust {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::ResourceExhaustion(
            $crate::mir::interpret::ResourceExhaustionInfo::$($tt)*
        )
    };
}

#[macro_export]
macro_rules! err_machine_stop {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::MachineStop(Box::new($($tt)*))
    };
}

// In the `throw_*` macros, avoid `return` to make them work with `try {}`.
#[macro_export]
macro_rules! throw_unsup {
    ($($tt:tt)*) => { do yeet err_unsup!($($tt)*) };
}

#[macro_export]
macro_rules! throw_unsup_format {
    ($($tt:tt)*) => { throw_unsup!(Unsupported(format!($($tt)*))) };
}

#[macro_export]
macro_rules! throw_inval {
    ($($tt:tt)*) => { do yeet err_inval!($($tt)*) };
}

#[macro_export]
macro_rules! throw_ub {
    ($($tt:tt)*) => { do yeet err_ub!($($tt)*) };
}

#[macro_export]
macro_rules! throw_ub_format {
    ($($tt:tt)*) => { throw_ub!(Ub(format!($($tt)*))) };
}

#[macro_export]
macro_rules! throw_exhaust {
    ($($tt:tt)*) => { do yeet err_exhaust!($($tt)*) };
}

#[macro_export]
macro_rules! throw_machine_stop {
    ($($tt:tt)*) => { do yeet err_machine_stop!($($tt)*) };
}

#[macro_export]
macro_rules! err_ub_custom {
    ($msg:expr $(, $($name:ident = $value:expr),* $(,)?)?) => {{
        $(
            let ($($name,)*) = ($($value,)*);
        )?
        err_ub!(Custom(
            rustc_middle::error::CustomSubdiagnostic {
                msg: || $msg,
                add_args: Box::new(move |mut set_arg| {
                    $($(
                        set_arg(stringify!($name).into(), rustc_errors::IntoDiagnosticArg::into_diagnostic_arg($name));
                    )*)?
                })
            }
        ))
    }};
}

#[macro_export]
macro_rules! throw_ub_custom {
    ($($tt:tt)*) => { do yeet err_ub_custom!($($tt)*) };
}

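// Illustrative usage sketch (not part of this module; the function and message below are
// made up): the `err_*` macros construct an `InterpError` from a variant of the matching
// `*Info` enum, and the `throw_*` macros `do yeet` that error out of an
// `InterpResult`-returning function, which is also why they work inside `try {}` blocks.
//
//     fn check_nonzero_divisor(d: u64) -> InterpResult<'static, ()> {
//         if d == 0 {
//             // Expands to `do yeet err_ub!(Ub(format!(...)))`.
//             throw_ub_format!("division by zero");
//         }
//         Ok(())
//     }
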
mod allocation;
mod error;
mod pointer;
mod queries;
mod value;

use std::fmt;
use std::io;
use std::io::{Read, Write};
use std::num::{NonZeroU32, NonZeroU64};
use std::sync::atomic::{AtomicU32, Ordering};

use rustc_ast::LitKind;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::{HashMapExt, Lock};
use rustc_data_structures::tiny_list::TinyList;
use rustc_errors::ErrorGuaranteed;
use rustc_hir::def_id::DefId;
use rustc_macros::HashStable;
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_serialize::{Decodable, Encodable};
use rustc_target::abi::{AddressSpace, Endian, HasDataLayout};

use crate::mir;
use crate::ty::codec::{TyDecoder, TyEncoder};
use crate::ty::GenericArgKind;
use crate::ty::{self, Instance, Ty, TyCtxt};

pub use self::error::{
    struct_error, BadBytesAccess, CheckInAllocMsg, ErrorHandled, EvalToAllocationRawResult,
    EvalToConstValueResult, EvalToValTreeResult, ExpectedKind, InterpError, InterpErrorInfo,
    InterpResult, InvalidMetaKind, InvalidProgramInfo, MachineStopType, PointerKind,
    ReportedErrorInfo, ResourceExhaustionInfo, ScalarSizeMismatch, UndefinedBehaviorInfo,
    UnsupportedOpInfo, ValidationErrorInfo, ValidationErrorKind,
};

pub use self::value::Scalar;

pub use self::allocation::{
    alloc_range, AllocBytes, AllocError, AllocRange, AllocResult, Allocation, ConstAllocation,
    InitChunk, InitChunkIter,
};

pub use self::pointer::{Pointer, PointerArithmetic, Provenance};

/// Uniquely identifies one of the following:
/// - A constant
/// - A static
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable, TypeVisitable)]
pub struct GlobalId<'tcx> {
    /// For a constant or static, the `Instance` of the item itself.
    /// For a promoted global, the `Instance` of the function it belongs to.
    pub instance: ty::Instance<'tcx>,

    /// The index for promoted globals within their function's `mir::Body`.
    pub promoted: Option<mir::Promoted>,
}

impl<'tcx> GlobalId<'tcx> {
    pub fn display(self, tcx: TyCtxt<'tcx>) -> String {
        let instance_name = with_no_trimmed_paths!(tcx.def_path_str(self.instance.def.def_id()));
        if let Some(promoted) = self.promoted {
            format!("{instance_name}::{promoted:?}")
        } else {
            instance_name
        }
    }
}

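// Illustrative sketch (the paths are hypothetical): for the first promoted constant inside
// a function `my_crate::foo`, `display` yields a string along the lines of
// "my_crate::foo::promoted[0]" (the `Debug` form of `mir::Promoted`), while a plain constant
// or static just yields its untrimmed path, e.g. "my_crate::FOO".
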
/// Input argument for `tcx.lit_to_const`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, HashStable)]
pub struct LitToConstInput<'tcx> {
    /// The absolute value of the resultant constant.
    pub lit: &'tcx LitKind,
    /// The type of the constant.
    pub ty: Ty<'tcx>,
    /// If the constant is negative.
    pub neg: bool,
}

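// Illustrative sketch (assumed shapes, for orientation only): a literal such as `-42i32`
// reaches `lit_to_const` roughly as
//
//     LitToConstInput { lit: &LitKind::Int(42, LitIntType::Signed(IntTy::I32)), ty: <i32>, neg: true }
//
// i.e. `lit` carries the absolute value and `neg` records that the constant is negated.
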
/// Error type for `tcx.lit_to_const`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, HashStable)]
pub enum LitToConstError {
    /// The literal's inferred type did not match the expected `ty` in the input.
    /// This is used for graceful error handling (`delay_span_bug`) in
    /// type checking (`Const::from_anon_const`).
    TypeError,
    Reported(ErrorGuaranteed),
}

#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct AllocId(pub NonZeroU64);

// We want the `Debug` output to be readable as it is used by `derive(Debug)` for
// all the Miri types.
impl fmt::Debug for AllocId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if f.alternate() { write!(f, "a{}", self.0) } else { write!(f, "alloc{}", self.0) }
    }
}

// No "Display" since AllocIds are not usually user-visible.

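// Illustrative sketch: with the `Debug` impl above, an id of 42 prints as "alloc42", and the
// alternate form (`{:#?}`) prints the shorter "a42":
//
//     let id = AllocId(NonZeroU64::new(42).unwrap());
//     assert_eq!(format!("{id:?}"), "alloc42");
//     assert_eq!(format!("{id:#?}"), "a42");
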
#[derive(TyDecodable, TyEncodable)]
enum AllocDiscriminant {
    Alloc,
    Fn,
    VTable,
    Static,
}

pub fn specialized_encode_alloc_id<'tcx, E: TyEncoder<I = TyCtxt<'tcx>>>(
    encoder: &mut E,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
) {
    match tcx.global_alloc(alloc_id) {
        GlobalAlloc::Memory(alloc) => {
            trace!("encoding {:?} with {:#?}", alloc_id, alloc);
            AllocDiscriminant::Alloc.encode(encoder);
            alloc.encode(encoder);
        }
        GlobalAlloc::Function(fn_instance) => {
            trace!("encoding {:?} with {:#?}", alloc_id, fn_instance);
            AllocDiscriminant::Fn.encode(encoder);
            fn_instance.encode(encoder);
        }
        GlobalAlloc::VTable(ty, poly_trait_ref) => {
            trace!("encoding {:?} with {ty:#?}, {poly_trait_ref:#?}", alloc_id);
            AllocDiscriminant::VTable.encode(encoder);
            ty.encode(encoder);
            poly_trait_ref.encode(encoder);
        }
        GlobalAlloc::Static(did) => {
            assert!(!tcx.is_thread_local_static(did));
            // References to statics don't need to know about their allocations,
            // just about their `DefId`.
            AllocDiscriminant::Static.encode(encoder);
            // Cannot use `did.encode(encoder)` because of a bug around
            // specializations and method calls.
            Encodable::<E>::encode(&did, encoder);
        }
    }
}

// Used to avoid infinite recursion when decoding cyclic allocations.
type DecodingSessionId = NonZeroU32;

#[derive(Clone)]
enum State {
    Empty,
    InProgressNonAlloc(TinyList<DecodingSessionId>),
    InProgress(TinyList<DecodingSessionId>, AllocId),
    Done(AllocId),
}

pub struct AllocDecodingState {
    // For each `AllocId`, we keep track of which decoding state it's currently in.
    decoding_state: Vec<Lock<State>>,
    // The offsets of each allocation in the data stream.
    data_offsets: Vec<u64>,
}

impl AllocDecodingState {
    #[inline]
    pub fn new_decoding_session(&self) -> AllocDecodingSession<'_> {
        static DECODER_SESSION_ID: AtomicU32 = AtomicU32::new(0);
        let counter = DECODER_SESSION_ID.fetch_add(1, Ordering::SeqCst);

        // Make sure this is never zero.
        let session_id = DecodingSessionId::new((counter & 0x7FFFFFFF) + 1).unwrap();

        AllocDecodingSession { state: self, session_id }
    }

    pub fn new(data_offsets: Vec<u64>) -> Self {
        let decoding_state =
            std::iter::repeat_with(|| Lock::new(State::Empty)).take(data_offsets.len()).collect();

        Self { decoding_state, data_offsets }
    }
}

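// Usage sketch (the `state` and `decoder` values are assumed to come from the surrounding
// metadata-decoding machinery): each decoding thread grabs its own session and decodes ids
// through it, which is what lets cyclic allocation graphs terminate.
//
//     let session = state.new_decoding_session();
//     let alloc_id = session.decode_alloc_id(&mut decoder);
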
#[derive(Copy, Clone)]
pub struct AllocDecodingSession<'s> {
    state: &'s AllocDecodingState,
    session_id: DecodingSessionId,
}

impl<'s> AllocDecodingSession<'s> {
    /// Decodes an `AllocId` in a thread-safe way.
    pub fn decode_alloc_id<'tcx, D>(&self, decoder: &mut D) -> AllocId
    where
        D: TyDecoder<I = TyCtxt<'tcx>>,
    {
        // Read the index of the allocation.
        let idx = usize::try_from(decoder.read_u32()).unwrap();
        let pos = usize::try_from(self.state.data_offsets[idx]).unwrap();

        // Decode the `AllocDiscriminant` now so that we know if we have to reserve an
        // `AllocId`.
        let (alloc_kind, pos) = decoder.with_position(pos, |decoder| {
            let alloc_kind = AllocDiscriminant::decode(decoder);
            (alloc_kind, decoder.position())
        });

        // Check the decoding state to see if it's already decoded or if we should
        // decode it here.
        let alloc_id = {
            let mut entry = self.state.decoding_state[idx].lock();

            match *entry {
                State::Done(alloc_id) => {
                    return alloc_id;
                }
                ref mut entry @ State::Empty => {
                    // We are allowed to decode.
                    match alloc_kind {
                        AllocDiscriminant::Alloc => {
                            // If this is an allocation, we need to reserve an
                            // `AllocId` so we can decode cyclic graphs.
                            let alloc_id = decoder.interner().reserve_alloc_id();
                            *entry =
                                State::InProgress(TinyList::new_single(self.session_id), alloc_id);
                            Some(alloc_id)
                        }
                        AllocDiscriminant::Fn
                        | AllocDiscriminant::Static
                        | AllocDiscriminant::VTable => {
                            // Fns and statics cannot be cyclic, and their `AllocId`
                            // is determined later by interning.
                            *entry =
                                State::InProgressNonAlloc(TinyList::new_single(self.session_id));
                            None
                        }
                    }
                }
                State::InProgressNonAlloc(ref mut sessions) => {
                    if sessions.contains(&self.session_id) {
                        bug!("this should be unreachable");
                    } else {
                        // Start decoding concurrently.
                        sessions.insert(self.session_id);
                        None
                    }
                }
                State::InProgress(ref mut sessions, alloc_id) => {
                    if sessions.contains(&self.session_id) {
                        // Don't recurse.
                        return alloc_id;
                    } else {
                        // Start decoding concurrently.
                        sessions.insert(self.session_id);
                        Some(alloc_id)
                    }
                }
            }
        };

        // Now decode the actual data.
        let alloc_id = decoder.with_position(pos, |decoder| {
            match alloc_kind {
                AllocDiscriminant::Alloc => {
                    let alloc = <ConstAllocation<'tcx> as Decodable<_>>::decode(decoder);
                    // We already have a reserved `AllocId`.
                    let alloc_id = alloc_id.unwrap();
                    trace!("decoded alloc {:?}: {:#?}", alloc_id, alloc);
                    decoder.interner().set_alloc_id_same_memory(alloc_id, alloc);
                    alloc_id
                }
                AllocDiscriminant::Fn => {
                    assert!(alloc_id.is_none());
                    trace!("creating fn alloc ID");
                    let instance = ty::Instance::decode(decoder);
                    trace!("decoded fn alloc instance: {:?}", instance);
                    let alloc_id = decoder.interner().reserve_and_set_fn_alloc(instance);
                    alloc_id
                }
                AllocDiscriminant::VTable => {
                    assert!(alloc_id.is_none());
                    trace!("creating vtable alloc ID");
                    let ty = <Ty<'_> as Decodable<D>>::decode(decoder);
                    let poly_trait_ref =
                        <Option<ty::PolyExistentialTraitRef<'_>> as Decodable<D>>::decode(decoder);
                    trace!("decoded vtable alloc instance: {ty:?}, {poly_trait_ref:?}");
                    let alloc_id =
                        decoder.interner().reserve_and_set_vtable_alloc(ty, poly_trait_ref);
                    alloc_id
                }
                AllocDiscriminant::Static => {
                    assert!(alloc_id.is_none());
                    trace!("creating extern static alloc ID");
                    let did = <DefId as Decodable<D>>::decode(decoder);
                    trace!("decoded static def-ID: {:?}", did);
                    let alloc_id = decoder.interner().reserve_and_set_static_alloc(did);
                    alloc_id
                }
            }
        });

        self.state.decoding_state[idx].with_lock(|entry| {
            *entry = State::Done(alloc_id);
        });

        alloc_id
    }
}

/// An allocation in the global (tcx-managed) memory can be either a function pointer,
/// a static, or a "real" allocation with some data in it.
#[derive(Debug, Clone, Eq, PartialEq, Hash, TyDecodable, TyEncodable, HashStable)]
pub enum GlobalAlloc<'tcx> {
    /// The alloc ID is used as a function pointer.
    Function(Instance<'tcx>),
    /// This alloc ID points to a symbolic (not-reified) vtable.
    VTable(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>),
    /// The alloc ID points to a "lazy" static variable that did not get computed (yet).
    /// This is also used to break the cycle in recursive statics.
    Static(DefId),
    /// The alloc ID points to memory.
    Memory(ConstAllocation<'tcx>),
}

impl<'tcx> GlobalAlloc<'tcx> {
    /// Panics if the `GlobalAlloc` does not refer to a `GlobalAlloc::Memory`.
    #[track_caller]
    #[inline]
    pub fn unwrap_memory(&self) -> ConstAllocation<'tcx> {
        match *self {
            GlobalAlloc::Memory(mem) => mem,
            _ => bug!("expected memory, got {:?}", self),
        }
    }

    /// Panics if the `GlobalAlloc` is not `GlobalAlloc::Function`.
    #[track_caller]
    #[inline]
    pub fn unwrap_fn(&self) -> Instance<'tcx> {
        match *self {
            GlobalAlloc::Function(instance) => instance,
            _ => bug!("expected function, got {:?}", self),
        }
    }

    /// Panics if the `GlobalAlloc` is not `GlobalAlloc::VTable`.
    #[track_caller]
    #[inline]
    pub fn unwrap_vtable(&self) -> (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>) {
        match *self {
            GlobalAlloc::VTable(ty, poly_trait_ref) => (ty, poly_trait_ref),
            _ => bug!("expected vtable, got {:?}", self),
        }
    }

    /// The address space that this `GlobalAlloc` should be placed in.
    #[inline]
    pub fn address_space(&self, cx: &impl HasDataLayout) -> AddressSpace {
        match self {
            GlobalAlloc::Function(..) => cx.data_layout().instruction_address_space,
            GlobalAlloc::Static(..) | GlobalAlloc::Memory(..) | GlobalAlloc::VTable(..) => {
                AddressSpace::DATA
            }
        }
    }
}

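// Illustrative sketch: callers typically match on the kind, or use the `unwrap_*` helpers
// when the kind is already known (both lines below assume an `id` that was looked up via
// `tcx.global_alloc`, defined further down in this module):
//
//     let alloc = tcx.global_alloc(id).unwrap_memory();
//     let space = tcx.global_alloc(id).address_space(&tcx);
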
pub(crate) struct AllocMap<'tcx> {
    /// Maps `AllocId`s to their corresponding allocations.
    alloc_map: FxHashMap<AllocId, GlobalAlloc<'tcx>>,

    /// Used to ensure that statics and functions only get one associated `AllocId`.
    /// Should never contain a `GlobalAlloc::Memory`!
    //
    // FIXME: Should we just have two separate dedup maps for statics and functions each?
    dedup: FxHashMap<GlobalAlloc<'tcx>, AllocId>,

    /// The `AllocId` to assign to the next requested ID.
    /// Always incremented; never gets smaller.
    next_id: AllocId,
}

impl<'tcx> AllocMap<'tcx> {
    pub(crate) fn new() -> Self {
        AllocMap {
            alloc_map: Default::default(),
            dedup: Default::default(),
            next_id: AllocId(NonZeroU64::new(1).unwrap()),
        }
    }
    fn reserve(&mut self) -> AllocId {
        let next = self.next_id;
        self.next_id.0 = self.next_id.0.checked_add(1).expect(
            "You overflowed a u64 by incrementing by 1... \
             You've just earned yourself a free drink if we ever meet. \
             Seriously, how did you do that?!",
        );
        next
    }
}

impl<'tcx> TyCtxt<'tcx> {
    /// Obtains a new allocation ID that can be referenced but does not
    /// yet have an allocation backing it.
    ///
    /// Make sure to call `set_alloc_id_memory` or `set_alloc_id_same_memory` before returning such
    /// an `AllocId` from a query.
    pub fn reserve_alloc_id(self) -> AllocId {
        self.alloc_map.lock().reserve()
    }

    /// Reserves a new ID *if* this allocation has not been dedup-reserved before.
    /// Should only be used for "symbolic" allocations (function pointers, vtables, statics);
    /// we don't want to dedup IDs for "real" memory!
    fn reserve_and_set_dedup(self, alloc: GlobalAlloc<'tcx>) -> AllocId {
        let mut alloc_map = self.alloc_map.lock();
        match alloc {
            GlobalAlloc::Function(..) | GlobalAlloc::Static(..) | GlobalAlloc::VTable(..) => {}
            GlobalAlloc::Memory(..) => bug!("Trying to dedup-reserve memory with real data!"),
        }
        if let Some(&alloc_id) = alloc_map.dedup.get(&alloc) {
            return alloc_id;
        }
        let id = alloc_map.reserve();
        debug!("creating alloc {alloc:?} with id {id:?}");
        alloc_map.alloc_map.insert(id, alloc.clone());
        alloc_map.dedup.insert(alloc, id);
        id
    }

    /// Generates an `AllocId` for a static, or returns a cached one in case this function has
    /// been called on the same static before.
    pub fn reserve_and_set_static_alloc(self, static_id: DefId) -> AllocId {
        self.reserve_and_set_dedup(GlobalAlloc::Static(static_id))
    }

    /// Generates an `AllocId` for a function. Depending on the function type,
    /// this might get deduplicated or assigned a new ID each time.
    pub fn reserve_and_set_fn_alloc(self, instance: Instance<'tcx>) -> AllocId {
        // Functions cannot be identified by pointers, as asm-equal functions can get deduplicated
        // by the linker (we set the "unnamed_addr" attribute for LLVM) and functions can be
        // duplicated across crates.
        // We thus generate a new `AllocId` for every mention of a function. This means that
        // `main as fn() == main as fn()` is false, while `let x = main as fn(); x == x` is true.
        // However, formatting code relies on function identity (see #58320), so we only do
        // this for generic functions. Lifetime parameters are ignored.
        let is_generic = instance
            .args
            .into_iter()
            .any(|kind| !matches!(kind.unpack(), GenericArgKind::Lifetime(_)));
        if is_generic {
            // Get a fresh ID.
            let mut alloc_map = self.alloc_map.lock();
            let id = alloc_map.reserve();
            alloc_map.alloc_map.insert(id, GlobalAlloc::Function(instance));
            id
        } else {
            // Deduplicate.
            self.reserve_and_set_dedup(GlobalAlloc::Function(instance))
        }
    }

    /// Generates an `AllocId` for a (symbolic, not-reified) vtable. Will get deduplicated.
    pub fn reserve_and_set_vtable_alloc(
        self,
        ty: Ty<'tcx>,
        poly_trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
    ) -> AllocId {
        self.reserve_and_set_dedup(GlobalAlloc::VTable(ty, poly_trait_ref))
    }

    /// Interns the `Allocation` and returns a new `AllocId`, even if there's already an identical
    /// `Allocation` with a different `AllocId`.
    /// Statics with identical content will still point to the same `Allocation`, i.e.,
    /// their data will be deduplicated through `Allocation` interning -- but they
    /// are different places in memory and as such need different IDs.
    pub fn reserve_and_set_memory_alloc(self, mem: ConstAllocation<'tcx>) -> AllocId {
        let id = self.reserve_alloc_id();
        self.set_alloc_id_memory(id, mem);
        id
    }

    /// Returns `None` in case the `AllocId` is dangling. An `InterpretCx` can still have a
    /// local `Allocation` for that `AllocId`, but having such an `AllocId` in a constant is
    /// illegal and will likely ICE.
    /// This function exists to allow const eval to detect the difference between evaluation-
    /// local dangling pointers and allocations in constants/statics.
    #[inline]
    pub fn try_get_global_alloc(self, id: AllocId) -> Option<GlobalAlloc<'tcx>> {
        self.alloc_map.lock().alloc_map.get(&id).cloned()
    }

    #[inline]
    #[track_caller]
    /// Panics in case the `AllocId` is dangling. Since that is impossible for `AllocId`s in
    /// constants (as all constants must pass interning and validation that check for dangling
    /// ids), this function is frequently used throughout rustc, but should not be used within
    /// the interpreter.
    pub fn global_alloc(self, id: AllocId) -> GlobalAlloc<'tcx> {
        match self.try_get_global_alloc(id) {
            Some(alloc) => alloc,
            None => bug!("could not find allocation for {id:?}"),
        }
    }

    /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to
    /// call this function twice, even with the same `Allocation`, will ICE the compiler.
    pub fn set_alloc_id_memory(self, id: AllocId, mem: ConstAllocation<'tcx>) {
        if let Some(old) = self.alloc_map.lock().alloc_map.insert(id, GlobalAlloc::Memory(mem)) {
            bug!("tried to set allocation ID {id:?}, but it was already existing as {old:#?}");
        }
    }

    /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. May be
    /// called twice for the same `(AllocId, Allocation)` pair.
    fn set_alloc_id_same_memory(self, id: AllocId, mem: ConstAllocation<'tcx>) {
        self.alloc_map.lock().alloc_map.insert_same(id, GlobalAlloc::Memory(mem));
    }
}

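// Usage sketch for the reserve/set protocol above (the surrounding query is hypothetical):
// an `AllocId` handed out by `reserve_alloc_id` must be backed by memory exactly once
// before it escapes, e.g.
//
//     let id = tcx.reserve_alloc_id();
//     // ... build `alloc: ConstAllocation<'tcx>` (possibly containing `id` itself) ...
//     tcx.set_alloc_id_memory(id, alloc);
//
// whereas symbolic allocations go through the deduplicating constructors, e.g.
// `tcx.reserve_and_set_static_alloc(def_id)`.
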
////////////////////////////////////////////////////////////////////////////////
// Methods to access integers in the target endianness
////////////////////////////////////////////////////////////////////////////////

#[inline]
pub fn write_target_uint(
    endianness: Endian,
    mut target: &mut [u8],
    data: u128,
) -> Result<(), io::Error> {
    // This u128 holds an "any-size uint" (since smaller uints can fit in it),
    // so we do not write all bytes of the u128, just the "payload".
    match endianness {
        Endian::Little => target.write(&data.to_le_bytes())?,
        Endian::Big => target.write(&data.to_be_bytes()[16 - target.len()..])?,
    };
    debug_assert!(target.len() == 0); // We should have filled the target buffer.
    Ok(())
}

#[inline]
pub fn read_target_uint(endianness: Endian, mut source: &[u8]) -> Result<u128, io::Error> {
    // This u128 holds an "any-size uint" (since smaller uints can fit in it).
    let mut buf = [0u8; std::mem::size_of::<u128>()];
    // So we do not read exactly 16 bytes into the u128, just the "payload".
    let uint = match endianness {
        Endian::Little => {
            source.read(&mut buf)?;
            Ok(u128::from_le_bytes(buf))
        }
        Endian::Big => {
            source.read(&mut buf[16 - source.len()..])?;
            Ok(u128::from_be_bytes(buf))
        }
    };
    debug_assert!(source.len() == 0); // We should have consumed the source buffer.
    uint
}
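
// Round-trip sketch (illustrative only): writing a value into a 2-byte target buffer and
// reading it back with the matching endianness recovers the original "payload".
//
//     let mut buf = [0u8; 2];
//     write_target_uint(Endian::Little, &mut buf, 0xbeef).unwrap();
//     assert_eq!(read_target_uint(Endian::Little, &buf).unwrap(), 0xbeef);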