// compiler/rustc_const_eval/src/const_eval/machine.rs (rustc 1.57.0)
use rustc_middle::mir;
use rustc_middle::ty::{self, Ty};
use std::borrow::Borrow;
use std::collections::hash_map::Entry;
use std::hash::Hash;

use rustc_data_structures::fx::FxHashMap;
use std::fmt;

use rustc_ast::Mutability;
use rustc_hir::def_id::DefId;
use rustc_middle::mir::AssertMessage;
use rustc_session::Limit;
use rustc_span::symbol::{sym, Symbol};
use rustc_target::abi::{Align, Size};
use rustc_target::spec::abi::Abi;

use crate::interpret::{
    self, compile_time_machine, AllocId, Allocation, Frame, ImmTy, InterpCx, InterpResult, OpTy,
    PlaceTy, Scalar, StackPopUnwind,
};

use super::error::*;

impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
26 /// "Intercept" a function call to a panic-related function
27 /// because we have something special to do for it.
28 /// If this returns successfully (`Ok`), the function should just be evaluated normally.
    fn hook_special_const_fn(
        &mut self,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        is_const_fn: bool,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
        // The list of functions we handle here must be in sync with
        // `is_lang_special_const_fn` in `transform/check_consts/mod.rs`.
        let def_id = instance.def_id();

        if is_const_fn {
            if Some(def_id) == self.tcx.lang_items().const_eval_select() {
                // Redirect to `const_eval_select_ct`.
                if let Some(const_eval_select) = self.tcx.lang_items().const_eval_select_ct() {
                    return Ok(Some(
                        ty::Instance::resolve(
                            *self.tcx,
                            ty::ParamEnv::reveal_all(),
                            const_eval_select,
                            instance.substs,
                        )
                        .unwrap()
                        .unwrap(),
                    ));
                }
            }
            return Ok(None);
        }

        if Some(def_id) == self.tcx.lang_items().panic_fn()
            || Some(def_id) == self.tcx.lang_items().panic_str()
            || Some(def_id) == self.tcx.lang_items().panic_display()
            || Some(def_id) == self.tcx.lang_items().begin_panic_fn()
        {
            // The panic message is a `&str` (or `&&str`); peel the references
            // until we can read the `str` itself.
            assert!(args.len() == 1);

            let mut msg_place = self.deref_operand(&args[0])?;
            while msg_place.layout.ty.is_ref() {
                msg_place = self.deref_operand(&msg_place.into())?;
            }

            let msg = Symbol::intern(self.read_str(&msg_place)?);
            let span = self.find_closest_untracked_caller_location();
            let (file, line, col) = self.location_triple_for_span(span);
            return Err(ConstEvalErrKind::Panic { msg, file, line, col }.into());
        } else if Some(def_id) == self.tcx.lang_items().panic_fmt()
            || Some(def_id) == self.tcx.lang_items().begin_panic_fmt()
        {
            // For panic_fmt, call const_panic_fmt instead.
            if let Some(const_panic_fmt) = self.tcx.lang_items().const_panic_fmt() {
                return Ok(Some(
                    ty::Instance::resolve(
                        *self.tcx,
                        ty::ParamEnv::reveal_all(),
                        const_panic_fmt,
                        self.tcx.intern_substs(&[]),
                    )
                    .unwrap()
                    .unwrap(),
                ));
            }
        }
        Ok(None)
    }
}
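
// For illustration: a call to `core::panicking::panic` (the `panic_fn` lang item)
// inside a `const` is intercepted above and reported as `ConstEvalErrKind::Panic`
// with the caller's file/line/column, instead of executing the panic machinery.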

/// Extra machine state for CTFE; this struct also serves as the `Machine` instance.
pub struct CompileTimeInterpreter<'mir, 'tcx> {
    /// For now, the number of terminators that can be evaluated before we throw a resource
    /// exhaustion error.
    ///
    /// Setting this to `0` disables the limit and allows the interpreter to run forever.
    pub steps_remaining: usize,

    /// The virtual call stack.
    pub(crate) stack: Vec<Frame<'mir, 'tcx, AllocId, ()>>,
}

#[derive(Copy, Clone, Debug)]
pub struct MemoryExtra {
    /// We need to make sure consts never point to anything mutable, even recursively. That is
    /// relied on for pattern matching on consts with references.
    /// To achieve this, two pieces have to work together:
    /// * Interning makes everything outside of statics immutable.
    /// * Pointers to allocations inside of statics can never leak outside of the static,
    ///   into a non-static global.
    /// This boolean here controls the second part.
    pub(super) can_access_statics: bool,
}

impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
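    /// Creates a fresh interpreter. The step budget is the session's
    /// `const_eval_limit`; a value of 0 disables the limit entirely
    /// (see `steps_remaining` above).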
    pub(super) fn new(const_eval_limit: Limit) -> Self {
        CompileTimeInterpreter { steps_remaining: const_eval_limit.0, stack: Vec::new() }
    }
}

impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
    #[inline(always)]
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>,
    {
        FxHashMap::contains_key(self, k)
    }

    #[inline(always)]
    fn insert(&mut self, k: K, v: V) -> Option<V> {
        FxHashMap::insert(self, k, v)
    }

    #[inline(always)]
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
    {
        FxHashMap::remove(self, k)
    }

    #[inline(always)]
    fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
        self.iter().filter_map(move |(k, v)| f(k, &*v)).collect()
    }

    #[inline(always)]
    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> {
        match self.get(&k) {
            Some(v) => Ok(v),
            None => {
                vacant()?;
                bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
            }
        }
    }

    #[inline(always)]
    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> {
        match self.entry(k) {
            Entry::Occupied(e) => Ok(e.into_mut()),
            Entry::Vacant(e) => {
                let v = vacant()?;
                Ok(e.insert(v))
            }
        }
    }
}

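/// The `InterpCx` instantiation used for compile-time function evaluation.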
crate type CompileTimeEvalContext<'mir, 'tcx> =
    InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;

#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum MemoryKind {
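    /// Memory allocated via the `const_allocate` intrinsic (see `call_intrinsic` below).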
    Heap,
}

impl fmt::Display for MemoryKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MemoryKind::Heap => write!(f, "heap allocation"),
        }
    }
}

impl interpret::MayLeak for MemoryKind {
    #[inline(always)]
    fn may_leak(self) -> bool {
        match self {
            MemoryKind::Heap => false,
        }
    }
}
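
// A hedged sketch of the surface feature this memory kind backs: the unstable
// `const_allocate` intrinsic. The feature gates below are assumptions for
// illustration, not something this file defines:
//
//     #![feature(core_intrinsics, const_heap)]
//     // Inside const evaluation, this asks the machine below (`call_intrinsic`)
//     // for a `MemoryKind::Heap` allocation of 4 bytes, aligned to 4:
//     let ptr: *mut u8 = unsafe { core::intrinsics::const_allocate(4, 4) };
//
// Because `may_leak` is `false` above, such an allocation must not simply be
// leaked by the evaluation.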

impl interpret::MayLeak for ! {
    #[inline(always)]
    fn may_leak(self) -> bool {
        // `self` is uninhabited
        self
    }
}

impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
    fn guaranteed_eq(&mut self, a: Scalar, b: Scalar) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Int { .. }, Scalar::Int { .. }) => a == b,
            // Equality of a pointer with an integer can never be known for sure.
            (Scalar::Int { .. }, Scalar::Ptr(..)) | (Scalar::Ptr(..), Scalar::Int { .. }) => false,
            // FIXME: return `true` when both sides are the same pointer, *except* that
            // some things (like functions and vtables) do not have stable addresses
            // so we need to be careful around them (see e.g. #73722).
            (Scalar::Ptr(..), Scalar::Ptr(..)) => false,
        }
    }

    fn guaranteed_ne(&mut self, a: Scalar, b: Scalar) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Int(_), Scalar::Int(_)) => a != b,
            // Comparisons of abstract pointers with null pointers are known if the pointer
            // is in bounds, because an in-bounds pointer can never be null.
            // Inequality with integers other than null can never be known for sure.
            (Scalar::Int(int), Scalar::Ptr(ptr, _)) | (Scalar::Ptr(ptr, _), Scalar::Int(int)) => {
                int.is_null() && !self.memory.ptr_may_be_null(ptr.into())
            }
            // FIXME: return `true` for at least some comparisons where we can reliably
            // determine the result of runtime inequality tests at compile-time.
            // Examples include comparison of addresses in different static items.
            (Scalar::Ptr(..), Scalar::Ptr(..)) => false,
        }
    }
}
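
// Illustrative consequences of the rules above (a sketch, not code that exists
// in this file):
//
//   guaranteed_eq(int_a, int_b)       -> int_a == int_b  (always a definite answer)
//   guaranteed_eq(ptr, int)           -> false           ("don't know")
//   guaranteed_ne(in_bounds_ptr, 0)   -> true            (in-bounds pointers are never null)
//   guaranteed_ne(ptr_a, ptr_b)       -> false           (conservatively unknown)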

impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
    compile_time_machine!(<'mir, 'tcx>);

    type MemoryKind = MemoryKind;

    type MemoryExtra = MemoryExtra;

    const PANIC_ON_ALLOC_FAIL: bool = false; // will be raised as a proper error

    fn load_mir(
        ecx: &InterpCx<'mir, 'tcx, Self>,
        instance: ty::InstanceDef<'tcx>,
    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
        match instance {
            ty::InstanceDef::Item(def) => {
                if ecx.tcx.is_ctfe_mir_available(def.did) {
                    Ok(ecx.tcx.mir_for_ctfe_opt_const_arg(def))
                } else {
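                    // No CTFE MIR is available here; this happens e.g. for extern functions.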
                    let path = ecx.tcx.def_path_str(def.did);
                    Err(ConstEvalErrKind::NeedsRfc(format!("calling extern function `{}`", path))
                        .into())
                }
            }
            _ => Ok(ecx.tcx.instance_mir(instance)),
        }
    }

    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        _abi: Abi,
        args: &[OpTy<'tcx>],
        _ret: Option<(&PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: StackPopUnwind, // unwinding is not supported in consts
    ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
        debug!("find_mir_or_eval_fn: {:?}", instance);

        // Only check non-glue functions
        if let ty::InstanceDef::Item(def) = instance.def {
            let mut is_const_fn = true;

            // Execution might have wandered off into other crates, so we cannot do a stability-
            // sensitive check here. But we can at least rule out functions that are not const
            // at all.
            if !ecx.tcx.is_const_fn_raw(def.did) {
                // Allow calling functions marked with `#[default_method_body_is_const]`.
                if !ecx.tcx.has_attr(def.did, sym::default_method_body_is_const) {
                    is_const_fn = false;
                }
            }

            // Some functions we support even if they are non-const -- but avoid testing
            // that for const fn! (`const_eval_select` is a const fn because it must use
            // const trait bounds.)
            if let Some(new_instance) = ecx.hook_special_const_fn(instance, args, is_const_fn)? {
                // We call another const fn instead.
                return Self::find_mir_or_eval_fn(ecx, new_instance, _abi, args, _ret, _unwind);
            }

            if !is_const_fn {
                // We certainly do *not* want to actually call the fn
                // though, so be sure we return here.
                throw_unsup_format!("calling non-const function `{}`", instance)
            }
        }
        // This is a const fn. Call it.
        Ok(Some(ecx.load_mir(instance.def, None)?))
    }

    fn call_intrinsic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        ret: Option<(&PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: StackPopUnwind,
    ) -> InterpResult<'tcx> {
        // Shared intrinsics.
        if ecx.emulate_intrinsic(instance, args, ret)? {
            return Ok(());
        }
        let intrinsic_name = ecx.tcx.item_name(instance.def_id());

        // CTFE-specific intrinsics.
        let (dest, ret) = match ret {
            None => {
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling intrinsic `{}`",
                    intrinsic_name
                ))
                .into());
            }
            Some(p) => p,
        };
        match intrinsic_name {
            sym::ptr_guaranteed_eq | sym::ptr_guaranteed_ne => {
                let a = ecx.read_immediate(&args[0])?.to_scalar()?;
                let b = ecx.read_immediate(&args[1])?.to_scalar()?;
                let cmp = if intrinsic_name == sym::ptr_guaranteed_eq {
                    ecx.guaranteed_eq(a, b)
                } else {
                    ecx.guaranteed_ne(a, b)
                };
                ecx.write_scalar(Scalar::from_bool(cmp), dest)?;
            }
            sym::const_allocate => {
                let size = ecx.read_scalar(&args[0])?.to_machine_usize(ecx)?;
                let align = ecx.read_scalar(&args[1])?.to_machine_usize(ecx)?;

                let align = match Align::from_bytes(align) {
                    Ok(a) => a,
                    Err(err) => throw_ub_format!("align has to be a power of 2: {}", err),
                };

                let ptr = ecx.memory.allocate(
                    Size::from_bytes(size as u64),
                    align,
                    interpret::MemoryKind::Machine(MemoryKind::Heap),
                )?;
                ecx.write_pointer(ptr, dest)?;
            }
            _ => {
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling intrinsic `{}`",
                    intrinsic_name
                ))
                .into());
            }
        }

        ecx.go_to_block(ret);
        Ok(())
    }

    fn assert_panic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        msg: &AssertMessage<'tcx>,
        _unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::AssertKind::*;
        // Convert `AssertKind<Operand>` to `AssertKind<ConstInt>`.
        let eval_to_int =
            |op| ecx.read_immediate(&ecx.eval_operand(op, None)?).map(|x| x.to_const_int());
        let err = match msg {
            BoundsCheck { ref len, ref index } => {
                let len = eval_to_int(len)?;
                let index = eval_to_int(index)?;
                BoundsCheck { len, index }
            }
            Overflow(op, l, r) => Overflow(*op, eval_to_int(l)?, eval_to_int(r)?),
            OverflowNeg(op) => OverflowNeg(eval_to_int(op)?),
            DivisionByZero(op) => DivisionByZero(eval_to_int(op)?),
            RemainderByZero(op) => RemainderByZero(eval_to_int(op)?),
            ResumedAfterReturn(generator_kind) => ResumedAfterReturn(*generator_kind),
            ResumedAfterPanic(generator_kind) => ResumedAfterPanic(*generator_kind),
        };
        Err(ConstEvalErrKind::AssertFailure(err).into())
    }

    fn abort(_ecx: &mut InterpCx<'mir, 'tcx, Self>, msg: String) -> InterpResult<'tcx, !> {
        Err(ConstEvalErrKind::Abort(msg).into())
    }

    fn binary_ptr_op(
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        _bin_op: mir::BinOp,
        _left: &ImmTy<'tcx>,
        _right: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> {
        Err(ConstEvalErrKind::NeedsRfc("pointer arithmetic or comparison".to_string()).into())
    }

    fn box_alloc(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _dest: &PlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        Err(ConstEvalErrKind::NeedsRfc("heap allocations via `box` keyword".to_string()).into())
    }

    fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        // The step limit has already been hit in a previous call to `before_terminator`
        // (or it was 0 to begin with, which disables the limit).
        if ecx.machine.steps_remaining == 0 {
            return Ok(());
        }

        ecx.machine.steps_remaining -= 1;
        if ecx.machine.steps_remaining == 0 {
            throw_exhaust!(StepLimitReached)
        }

        Ok(())
    }
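
    // The step budget consumed above is the session's `const_eval_limit`. As a
    // hedged sketch (surface syntax assumed, not defined in this file), a crate
    // can raise it via:
    //
    //     #![feature(const_eval_limit)]
    //     #![const_eval_limit = "10000000"]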

    #[inline(always)]
    fn init_frame_extra(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        frame: Frame<'mir, 'tcx>,
    ) -> InterpResult<'tcx, Frame<'mir, 'tcx>> {
        // Enforce stack size limit. Add 1 because this is run before the new frame is pushed.
        if !ecx.recursion_limit.value_within_limit(ecx.stack().len() + 1) {
            throw_exhaust!(StackFrameLimitReached)
        } else {
            Ok(frame)
        }
    }

    #[inline(always)]
    fn stack(
        ecx: &'a InterpCx<'mir, 'tcx, Self>,
    ) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
        &ecx.machine.stack
    }

    #[inline(always)]
    fn stack_mut(
        ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
    ) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
        &mut ecx.machine.stack
    }

    fn before_access_global(
        memory_extra: &MemoryExtra,
        alloc_id: AllocId,
        allocation: &Allocation,
        static_def_id: Option<DefId>,
        is_write: bool,
    ) -> InterpResult<'tcx> {
        if is_write {
            // Write access. These are never allowed, but we give a targeted error message.
            if allocation.mutability == Mutability::Not {
                Err(err_ub!(WriteToReadOnly(alloc_id)).into())
            } else {
                Err(ConstEvalErrKind::ModifiedGlobal.into())
            }
        } else {
            // Read access. These are usually allowed, with some exceptions.
            if memory_extra.can_access_statics {
                // Machine configuration allows us to read from anything (e.g., `static` initializer).
                Ok(())
            } else if static_def_id.is_some() {
                // Machine configuration does not allow us to read statics
                // (e.g., `const` initializer).
                // See `const_eval::machine::MemoryExtra::can_access_statics` for why
                // this check is so important: if we could read statics, we could read pointers
                // to mutable allocations *inside* statics. These allocations are not themselves
                // statics, so pointers to them can get around the check in `validity.rs`.
                Err(ConstEvalErrKind::ConstAccessesStatic.into())
            } else {
                // Immutable global, this read is fine.
                // But make sure we never accept a read from something mutable, as that would be
                // unsound: the content of this allocation may be different now and at run-time,
                // so if we permitted reading now we might return the wrong value.
                assert_eq!(allocation.mutability, Mutability::Not);
                Ok(())
            }
        }
    }
}

// Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
// so we can end up having a file with just that impl, but for now, let's keep the impl discoverable
// at the bottom of this file.