// compiler/rustc_mir/src/const_eval/machine.rs (rustc 1.55.0)
use rustc_middle::mir;
use rustc_middle::ty::{self, Ty};
use std::borrow::Borrow;
use std::collections::hash_map::Entry;
use std::hash::Hash;

use rustc_data_structures::fx::FxHashMap;
use std::fmt;

use rustc_ast::Mutability;
use rustc_hir::def_id::DefId;
use rustc_middle::mir::AssertMessage;
use rustc_session::Limit;
use rustc_span::symbol::{sym, Symbol};
use rustc_target::abi::{Align, Size};
use rustc_target::spec::abi::Abi;

use crate::interpret::{
    self, compile_time_machine, AllocId, Allocation, Frame, ImmTy, InterpCx, InterpResult, OpTy,
    PlaceTy, Scalar, StackPopUnwind,
};

use super::error::*;
impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
    /// "Intercept" a function call to a panic-related function
    /// because we have something special to do for it.
    /// If this returns successfully (`Ok`), the function should just be evaluated normally.
    fn hook_panic_fn(
        &mut self,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
    ) -> InterpResult<'tcx> {
        let def_id = instance.def_id();
        if Some(def_id) == self.tcx.lang_items().panic_fn()
            || Some(def_id) == self.tcx.lang_items().panic_str()
            || Some(def_id) == self.tcx.lang_items().begin_panic_fn()
        {
            // &str
            assert!(args.len() == 1);

            let msg_place = self.deref_operand(&args[0])?;
            let msg = Symbol::intern(self.read_str(&msg_place)?);
            let span = self.find_closest_untracked_caller_location();
            let (file, line, col) = self.location_triple_for_span(span);
            Err(ConstEvalErrKind::Panic { msg, file, line, col }.into())
        } else {
            Ok(())
        }
    }
}
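
// An illustrative sketch of the hook above: during CTFE, evaluating something like
//
//     const _: () = panic!("boom");
//
// reaches `hook_panic_fn` when the interpreter is about to call the panic lang item,
// and is reported as `ConstEvalErrKind::Panic` carrying the message plus the file,
// line, and column of the panic site, instead of actually running the panic machinery.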

/// Extra machine state for CTFE, and the Machine instance
pub struct CompileTimeInterpreter<'mir, 'tcx> {
    /// For now, the number of terminators that can be evaluated before we throw a resource
    /// exhaustion error.
    ///
    /// Setting this to `0` disables the limit and allows the interpreter to run forever.
    pub steps_remaining: usize,

    /// The virtual call stack.
    pub(crate) stack: Vec<Frame<'mir, 'tcx, AllocId, ()>>,
}

#[derive(Copy, Clone, Debug)]
pub struct MemoryExtra {
    /// We need to make sure consts never point to anything mutable, even recursively. That is
    /// relied on for pattern matching on consts with references.
    /// To achieve this, two pieces have to work together:
    /// * Interning makes everything outside of statics immutable.
    /// * Pointers to allocations inside of statics can never leak outside, to a non-static global.
    /// This boolean here controls the second part.
    pub(super) can_access_statics: bool,
}
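
// An illustrative sketch of the invariant `can_access_statics` guards: a `const`
// initializer must never read from a static, because statics can point to memory that
// is mutable at run-time. Roughly (assuming the reference got past earlier HIR checks),
//
//     static S: i32 = 0;
//     const C: i32 = S;
//
// would reach `before_access_global` below with `static_def_id` set and
// `can_access_statics == false`, and be rejected as `ConstEvalErrKind::ConstAccessesStatic`.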

impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
    pub(super) fn new(const_eval_limit: Limit) -> Self {
        CompileTimeInterpreter { steps_remaining: const_eval_limit.0, stack: Vec::new() }
    }
}

impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
    #[inline(always)]
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>,
    {
        FxHashMap::contains_key(self, k)
    }

    #[inline(always)]
    fn insert(&mut self, k: K, v: V) -> Option<V> {
        FxHashMap::insert(self, k, v)
    }

    #[inline(always)]
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
    {
        FxHashMap::remove(self, k)
    }

    #[inline(always)]
    fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
        self.iter().filter_map(move |(k, v)| f(k, &*v)).collect()
    }

    #[inline(always)]
    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> {
        match self.get(&k) {
            Some(v) => Ok(v),
            None => {
                vacant()?;
                bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
            }
        }
    }

    #[inline(always)]
    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> {
        match self.entry(k) {
            Entry::Occupied(e) => Ok(e.into_mut()),
            Entry::Vacant(e) => {
                let v = vacant()?;
                Ok(e.insert(v))
            }
        }
    }
}

crate type CompileTimeEvalContext<'mir, 'tcx> =
    InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;

#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum MemoryKind {
    Heap,
}

impl fmt::Display for MemoryKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MemoryKind::Heap => write!(f, "heap allocation"),
        }
    }
}

impl interpret::MayLeak for MemoryKind {
    #[inline(always)]
    fn may_leak(self) -> bool {
        match self {
            MemoryKind::Heap => false,
        }
    }
}

impl interpret::MayLeak for ! {
    #[inline(always)]
    fn may_leak(self) -> bool {
        // `self` is uninhabited
        self
    }
}

impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
    fn guaranteed_eq(&mut self, a: Scalar, b: Scalar) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Int { .. }, Scalar::Int { .. }) => a == b,
            // Equality of a pointer with an integer can never be known for sure.
            (Scalar::Int { .. }, Scalar::Ptr(..)) | (Scalar::Ptr(..), Scalar::Int { .. }) => false,
            // FIXME: return `true` when both sides are the same pointer, *except* that
            // some things (like functions and vtables) do not have stable addresses,
            // so we need to be careful around them (see e.g. #73722).
            (Scalar::Ptr(..), Scalar::Ptr(..)) => false,
        }
    }

    fn guaranteed_ne(&mut self, a: Scalar, b: Scalar) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Int(_), Scalar::Int(_)) => a != b,
            // Comparisons of abstract pointers with null pointers are known if the pointer
            // is in bounds, because a pointer that is in bounds cannot be null.
            // Inequality with integers other than null can never be known for sure.
            (Scalar::Int(int), Scalar::Ptr(ptr, _)) | (Scalar::Ptr(ptr, _), Scalar::Int(int)) => {
                int.is_null() && !self.memory.ptr_may_be_null(ptr.into())
            }
            // FIXME: return `true` for at least some comparisons where we can reliably
            // determine the result of runtime inequality tests at compile-time.
            // Examples include comparisons of addresses in different static items.
            (Scalar::Ptr(..), Scalar::Ptr(..)) => false,
        }
    }
}
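
// An illustrative sketch: the two helpers above implement the `ptr_guaranteed_eq` and
// `ptr_guaranteed_ne` intrinsics handled in `call_intrinsic` below, which back the
// unstable `<*const T>::guaranteed_eq`/`guaranteed_ne` methods. Answering `false` is
// always sound for them, since it merely means "not known at compile time":
//
//     let x = 0u8;
//     let p: *const u8 = &x;
//     let _cmp = p.guaranteed_eq(p); // may be `false` ("don't know") even though p == p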

impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
    compile_time_machine!(<'mir, 'tcx>);

    type MemoryKind = MemoryKind;

    type MemoryExtra = MemoryExtra;

    const PANIC_ON_ALLOC_FAIL: bool = false; // will be raised as a proper error

    fn load_mir(
        ecx: &InterpCx<'mir, 'tcx, Self>,
        instance: ty::InstanceDef<'tcx>,
    ) -> InterpResult<'tcx, &'tcx mir::Body<'tcx>> {
        match instance {
            ty::InstanceDef::Item(def) => {
                if ecx.tcx.is_ctfe_mir_available(def.did) {
                    Ok(ecx.tcx.mir_for_ctfe_opt_const_arg(def))
                } else {
                    throw_unsup!(NoMirFor(def.did))
                }
            }
            _ => Ok(ecx.tcx.instance_mir(instance)),
        }
    }

    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        _abi: Abi,
        args: &[OpTy<'tcx>],
        _ret: Option<(&PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: StackPopUnwind, // unwinding is not supported in consts
    ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
        debug!("find_mir_or_eval_fn: {:?}", instance);

        // Only check non-glue functions.
        if let ty::InstanceDef::Item(def) = instance.def {
            // Execution might have wandered off into other crates, so we cannot do a stability-
            // sensitive check here. But we can at least rule out functions that are not const
            // at all.
            if !ecx.tcx.is_const_fn_raw(def.did) {
                // Allow calling functions marked with #[default_method_body_is_const].
                if !ecx.tcx.has_attr(def.did, sym::default_method_body_is_const) {
                    // Some functions we support even if they are non-const -- but avoid testing
                    // that for const fn!
                    ecx.hook_panic_fn(instance, args)?;
                    // We certainly do *not* want to actually call the fn
                    // though, so be sure we return here.
                    throw_unsup_format!("calling non-const function `{}`", instance)
                }
            }
        }
        // This is a const fn. Call it.
        Ok(Some(match ecx.load_mir(instance.def, None) {
            Ok(body) => body,
            Err(err) => {
                if let err_unsup!(NoMirFor(did)) = err.kind() {
                    let path = ecx.tcx.def_path_str(*did);
                    return Err(ConstEvalErrKind::NeedsRfc(format!(
                        "calling extern function `{}`",
                        path
                    ))
                    .into());
                }
                return Err(err);
            }
        }))
    }
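
    // An illustrative sketch: the `throw_unsup_format!` path above is what rejects e.g.
    //
    //     fn runtime_only() -> i32 { 42 }
    //     const C: i32 = runtime_only(); // error: calling non-const function `runtime_only`
    //
    // whereas a `const fn` with available MIR falls through to `load_mir` and is simply
    // interpreted.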

    fn call_intrinsic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        ret: Option<(&PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: StackPopUnwind,
    ) -> InterpResult<'tcx> {
        // Shared intrinsics.
        if ecx.emulate_intrinsic(instance, args, ret)? {
            return Ok(());
        }
        let intrinsic_name = ecx.tcx.item_name(instance.def_id());

        // CTFE-specific intrinsics.
        let (dest, ret) = match ret {
            None => {
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling intrinsic `{}`",
                    intrinsic_name
                ))
                .into());
            }
            Some(p) => p,
        };
        match intrinsic_name {
            sym::ptr_guaranteed_eq | sym::ptr_guaranteed_ne => {
                let a = ecx.read_immediate(&args[0])?.to_scalar()?;
                let b = ecx.read_immediate(&args[1])?.to_scalar()?;
                let cmp = if intrinsic_name == sym::ptr_guaranteed_eq {
                    ecx.guaranteed_eq(a, b)
                } else {
                    ecx.guaranteed_ne(a, b)
                };
                ecx.write_scalar(Scalar::from_bool(cmp), dest)?;
            }
            sym::const_allocate => {
                let size = ecx.read_scalar(&args[0])?.to_machine_usize(ecx)?;
                let align = ecx.read_scalar(&args[1])?.to_machine_usize(ecx)?;

                let align = match Align::from_bytes(align) {
                    Ok(a) => a,
                    Err(err) => throw_ub_format!("align has to be a power of 2, {}", err),
                };

                let ptr = ecx.memory.allocate(
                    Size::from_bytes(size as u64),
                    align,
                    interpret::MemoryKind::Machine(MemoryKind::Heap),
                )?;
                ecx.write_pointer(ptr, dest)?;
            }
            _ => {
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling intrinsic `{}`",
                    intrinsic_name
                ))
                .into());
            }
        }

        ecx.go_to_block(ret);
        Ok(())
    }
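
    // An illustrative sketch: `const_allocate` is the unstable intrinsic behind
    // compile-time heap allocation, roughly
    //
    //     #![feature(core_intrinsics, const_heap)]
    //     const P: *mut u8 = unsafe { std::intrinsics::const_allocate(4, 4) };
    //
    // The returned memory is tagged `MemoryKind::Heap`, for which `may_leak` above
    // returns `false`, so such allocations are not allowed to silently leak out of the
    // evaluation.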

    fn assert_panic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        msg: &AssertMessage<'tcx>,
        _unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::AssertKind::*;
        // Convert `AssertKind<Operand>` to `AssertKind<Scalar>`.
        let eval_to_int =
            |op| ecx.read_immediate(&ecx.eval_operand(op, None)?).map(|x| x.to_const_int());
        let err = match msg {
            BoundsCheck { ref len, ref index } => {
                let len = eval_to_int(len)?;
                let index = eval_to_int(index)?;
                BoundsCheck { len, index }
            }
            Overflow(op, l, r) => Overflow(*op, eval_to_int(l)?, eval_to_int(r)?),
            OverflowNeg(op) => OverflowNeg(eval_to_int(op)?),
            DivisionByZero(op) => DivisionByZero(eval_to_int(op)?),
            RemainderByZero(op) => RemainderByZero(eval_to_int(op)?),
            ResumedAfterReturn(generator_kind) => ResumedAfterReturn(*generator_kind),
            ResumedAfterPanic(generator_kind) => ResumedAfterPanic(*generator_kind),
        };
        Err(ConstEvalErrKind::AssertFailure(err).into())
    }
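
    // An illustrative sketch: this hook fires when a MIR `Assert` terminator fails during
    // CTFE, e.g.
    //
    //     const X: i32 = [1, 2, 3][4]; // BoundsCheck { len: 3, index: 4 }
    //
    // Each operand is first evaluated to a concrete integer so that the final error can
    // report the actual values involved.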

    fn abort(_ecx: &mut InterpCx<'mir, 'tcx, Self>, msg: String) -> InterpResult<'tcx, !> {
        Err(ConstEvalErrKind::Abort(msg).into())
    }

    fn binary_ptr_op(
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        _bin_op: mir::BinOp,
        _left: &ImmTy<'tcx>,
        _right: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> {
        Err(ConstEvalErrKind::NeedsRfc("pointer arithmetic or comparison".to_string()).into())
    }

    fn box_alloc(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _dest: &PlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        Err(ConstEvalErrKind::NeedsRfc("heap allocations via `box` keyword".to_string()).into())
    }

    fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        // The step limit has already been hit in a previous call to `before_terminator`.
        if ecx.machine.steps_remaining == 0 {
            return Ok(());
        }

        ecx.machine.steps_remaining -= 1;
        if ecx.machine.steps_remaining == 0 {
            throw_exhaust!(StepLimitReached)
        }

        Ok(())
    }
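
    // An illustrative sketch: `steps_remaining` is initialized from the crate's
    // const-eval limit (see `CompileTimeInterpreter::new` above), which can be raised
    // with the unstable attribute, roughly
    //
    //     #![feature(const_eval_limit)]
    //     #![const_eval_limit = "10000000"]
    //
    // A limit configured as `0` disables the check entirely: the early return above
    // then triggers before the counter is ever decremented.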

    #[inline(always)]
    fn init_frame_extra(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        frame: Frame<'mir, 'tcx>,
    ) -> InterpResult<'tcx, Frame<'mir, 'tcx>> {
        // Enforce stack size limit. Add 1 because this is run before the new frame is pushed.
        if !ecx.recursion_limit.value_within_limit(ecx.stack().len() + 1) {
            throw_exhaust!(StackFrameLimitReached)
        } else {
            Ok(frame)
        }
    }

    #[inline(always)]
    fn stack(
        ecx: &'a InterpCx<'mir, 'tcx, Self>,
    ) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
        &ecx.machine.stack
    }

    #[inline(always)]
    fn stack_mut(
        ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
    ) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
        &mut ecx.machine.stack
    }

    fn before_access_global(
        memory_extra: &MemoryExtra,
        alloc_id: AllocId,
        allocation: &Allocation,
        static_def_id: Option<DefId>,
        is_write: bool,
    ) -> InterpResult<'tcx> {
        if is_write {
            // Write access. These are never allowed, but we give a targeted error message.
            if allocation.mutability == Mutability::Not {
                Err(err_ub!(WriteToReadOnly(alloc_id)).into())
            } else {
                Err(ConstEvalErrKind::ModifiedGlobal.into())
            }
        } else {
            // Read access. These are usually allowed, with some exceptions.
            if memory_extra.can_access_statics {
                // Machine configuration allows us to read from anything (e.g., `static`
                // initializer).
                Ok(())
            } else if static_def_id.is_some() {
                // Machine configuration does not allow us to read statics
                // (e.g., `const` initializer).
                // See const_eval::machine::MemoryExtra::can_access_statics for why
                // this check is so important: if we could read statics, we could read pointers
                // to mutable allocations *inside* statics. These allocations are not themselves
                // statics, so pointers to them can get around the check in `validity.rs`.
                Err(ConstEvalErrKind::ConstAccessesStatic.into())
            } else {
                // Immutable global, this read is fine.
                // But make sure we never accept a read from something mutable, as that would be
                // unsound: the content of this allocation may differ between compile-time and
                // run-time, so if we permitted the read we might return the wrong value.
                assert_eq!(allocation.mutability, Mutability::Not);
                Ok(())
            }
        }
    }
}

// Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
// so we can end up having a file with just that impl, but for now, let's keep the impl discoverable
// at the bottom of this file.