// src/librustc_mir/interpret/validity.rs (upstream rustc 1.41.1)
//! Check the validity invariant of a given value, and tell the user
//! where in the value it got violated.
//! In const context, this goes even further and tries to approximate const safety.
//! That's useful because it means other passes (e.g. promotion) can rely on `const`s
//! to be const-safe.

use std::fmt::Write;
use std::ops::RangeInclusive;

use rustc::hir;
use rustc::ty;
use rustc::ty::layout::{self, LayoutOf, TyLayout, VariantIdx};
use rustc_data_structures::fx::FxHashSet;
use syntax_pos::symbol::{sym, Symbol};

use std::hash::Hash;

use super::{
    CheckInAllocMsg, GlobalAlloc, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, Scalar,
    ValueVisitor,
};

// Bail out of validation with an "encountered ..." message (optionally
// "..., but expected ..."), including the path to the offending value if one has been recorded.
macro_rules! throw_validation_failure {
    ($what:expr, $where:expr, $details:expr) => {{
        let mut msg = format!("encountered {}", $what);
        let where_ = &$where;
        if !where_.is_empty() {
            msg.push_str(" at ");
            write_path(&mut msg, where_);
        }
        write!(&mut msg, ", but expected {}", $details).unwrap();
        throw_unsup!(ValidationFailure(msg))
    }};
    ($what:expr, $where:expr) => {{
        let mut msg = format!("encountered {}", $what);
        let where_ = &$where;
        if !where_.is_empty() {
            msg.push_str(" at ");
            write_path(&mut msg, where_);
        }
        throw_unsup!(ValidationFailure(msg))
    }};
}

// Unwrap a result, turning any error into a validation failure at the given path.
macro_rules! try_validation {
    ($e:expr, $what:expr, $where:expr, $details:expr) => {{
        match $e {
            Ok(x) => x,
            Err(_) => throw_validation_failure!($what, $where, $details),
        }
    }};

    ($e:expr, $what:expr, $where:expr) => {{
        match $e {
            Ok(x) => x,
            Err(_) => throw_validation_failure!($what, $where),
        }
    }};
}
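
// For illustration, a typical use later in this file is
// `try_validation!(value.to_bool(), value, self.path, "a boolean")`: if `to_bool` fails,
// validation aborts with a message like "encountered <value> at <path>, but expected a boolean".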

/// We want to show a nice path to the invalid field for diagnostics,
/// but avoid string operations in the happy case where no error happens.
/// So we track a `Vec<PathElem>` where `PathElem` contains all the data we
/// need to later print something for the user.
#[derive(Copy, Clone, Debug)]
pub enum PathElem {
    Field(Symbol),
    Variant(Symbol),
    GeneratorState(VariantIdx),
    ClosureVar(Symbol),
    ArrayElem(usize),
    TupleElem(usize),
    Deref,
    Tag,
    DynDowncast,
}

/// State for tracking recursive validation of references
pub struct RefTracking<T, PATH = ()> {
    pub seen: FxHashSet<T>,
    pub todo: Vec<(T, PATH)>,
}

impl<T: Copy + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
    pub fn empty() -> Self {
        RefTracking { seen: FxHashSet::default(), todo: vec![] }
    }
    pub fn new(op: T) -> Self {
        let mut ref_tracking_for_consts =
            RefTracking { seen: FxHashSet::default(), todo: vec![(op, PATH::default())] };
        ref_tracking_for_consts.seen.insert(op);
        ref_tracking_for_consts
    }

    pub fn track(&mut self, op: T, path: impl FnOnce() -> PATH) {
        if self.seen.insert(op) {
            trace!("Recursing below ptr {:#?}", op);
            let path = path();
            // Remember to come back to this later.
            self.todo.push((op, path));
        }
    }
}
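
// A minimal sketch (for illustration; the actual drain loop lives in the const-eval caller,
// not in this file) of the intended worklist pattern: places discovered behind references
// are queued via `track`, and `seen` prevents infinite recursion through cyclic data:
//
//     let mut ref_tracking = RefTracking::new(root_mplace);
//     while let Some((mplace, path)) = ref_tracking.todo.pop() {
//         ecx.validate_operand(mplace.into(), path, Some(&mut ref_tracking))?;
//     }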

/// Format a path
fn write_path(out: &mut String, path: &Vec<PathElem>) {
    use self::PathElem::*;

    for elem in path.iter() {
        match elem {
            Field(name) => write!(out, ".{}", name),
            Variant(name) => write!(out, ".<downcast-variant({})>", name),
            GeneratorState(idx) => write!(out, ".<generator-state({})>", idx.index()),
            ClosureVar(name) => write!(out, ".<closure-var({})>", name),
            TupleElem(idx) => write!(out, ".{}", idx),
            ArrayElem(idx) => write!(out, "[{}]", idx),
            Deref =>
            // This does not match Rust syntax, but it is more readable for long paths -- and
            // some of the other items here also are not Rust syntax. Actually we can't
            // even use the usual syntax because we are just showing the projections,
            // not the root.
            {
                write!(out, ".<deref>")
            }
            Tag => write!(out, ".<enum-tag>"),
            DynDowncast => write!(out, ".<dyn-downcast>"),
        }
        .unwrap()
    }
}

// Test if a range that wraps around at overflow contains `test`.
fn wrapping_range_contains(r: &RangeInclusive<u128>, test: u128) -> bool {
    let (lo, hi) = r.clone().into_inner();
    if lo > hi {
        // Wrapped
        (..=hi).contains(&test) || (lo..).contains(&test)
    } else {
        // Normal
        r.contains(&test)
    }
}

// Formats the range such that a sentence like "expected something {}" makes sense,
// e.g. "expected something in the range 1..=10" or "expected something less or equal to 7".
fn wrapping_range_format(r: &RangeInclusive<u128>, max_hi: u128) -> String {
    let (lo, hi) = r.clone().into_inner();
    debug_assert!(hi <= max_hi);
    if lo > hi {
        format!("less or equal to {}, or greater or equal to {}", hi, lo)
    } else if lo == hi {
        format!("equal to {}", lo)
    } else if lo == 0 {
        debug_assert!(hi < max_hi, "should not be printing if the range covers everything");
        format!("less or equal to {}", hi)
    } else if hi == max_hi {
        debug_assert!(lo > 0, "should not be printing if the range covers everything");
        format!("greater or equal to {}", lo)
    } else {
        format!("in the range {:?}", r)
    }
}
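
// A minimal sketch of how the two helpers above behave, written as a unit test purely for
// illustration (not part of the upstream file); the concrete strings follow from the code above.
#[cfg(test)]
mod wrapping_range_tests {
    use super::{wrapping_range_contains, wrapping_range_format};

    #[test]
    fn wrapped_and_unwrapped_ranges() {
        // A "wrapped" range for a 1-byte scalar (`max_hi == 255`): valid values are
        // 250..=255 and 0..=10.
        let wrapped = 250..=10u128;
        assert!(wrapping_range_contains(&wrapped, 255));
        assert!(wrapping_range_contains(&wrapped, 3));
        assert!(!wrapping_range_contains(&wrapped, 100));
        assert_eq!(
            wrapping_range_format(&wrapped, 255),
            "less or equal to 10, or greater or equal to 250"
        );

        // A normal (non-wrapping) range, e.g. the NULL niche of a reference: 1..=255.
        assert_eq!(wrapping_range_format(&(1..=255u128), 255), "greater or equal to 1");
    }
}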

struct ValidityVisitor<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> {
    /// The `path` may be pushed to, but the part that is present when a function
    /// starts must not be changed! `visit_fields` and `visit_array` rely on
    /// this stack discipline.
    path: Vec<PathElem>,
    ref_tracking_for_consts:
        Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>>,
    ecx: &'rt InterpCx<'mir, 'tcx, M>,
}

impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, 'tcx, M> {
    fn aggregate_field_path_elem(&mut self, layout: TyLayout<'tcx>, field: usize) -> PathElem {
        match layout.ty.kind {
            // generators and closures.
            ty::Closure(def_id, _) | ty::Generator(def_id, _, _) => {
                let mut name = None;
                if def_id.is_local() {
                    let tables = self.ecx.tcx.typeck_tables_of(def_id);
                    if let Some(upvars) = tables.upvar_list.get(&def_id) {
                        // Sometimes the index is beyond the number of upvars (seen
                        // for a generator).
                        if let Some((&var_hir_id, _)) = upvars.get_index(field) {
                            let node = self.ecx.tcx.hir().get(var_hir_id);
                            if let hir::Node::Binding(pat) = node {
                                if let hir::PatKind::Binding(_, _, ident, _) = pat.kind {
                                    name = Some(ident.name);
                                }
                            }
                        }
                    }
                }

                PathElem::ClosureVar(name.unwrap_or_else(|| {
                    // Fall back to showing the field index.
                    sym::integer(field)
                }))
            }

            // tuples
            ty::Tuple(_) => PathElem::TupleElem(field),

            // enums
            ty::Adt(def, ..) if def.is_enum() => {
                // we might be projecting *to* a variant, or to a field *in* a variant.
                match layout.variants {
                    layout::Variants::Single { index } =>
                    // Inside a variant
                    {
                        PathElem::Field(def.variants[index].fields[field].ident.name)
                    }
                    _ => bug!(),
                }
            }

            // other ADTs
            ty::Adt(def, _) => PathElem::Field(def.non_enum_variant().fields[field].ident.name),

            // arrays/slices
            ty::Array(..) | ty::Slice(..) => PathElem::ArrayElem(field),

            // dyn traits
            ty::Dynamic(..) => PathElem::DynDowncast,

            // nothing else has an aggregate layout
            _ => bug!("aggregate_field_path_elem: got non-aggregate type {:?}", layout.ty),
        }
    }

    fn visit_elem(
        &mut self,
        new_op: OpTy<'tcx, M::PointerTag>,
        elem: PathElem,
    ) -> InterpResult<'tcx> {
        // Remember the old state
        let path_len = self.path.len();
        // Perform operation
        self.path.push(elem);
        self.visit_value(new_op)?;
        // Undo changes
        self.path.truncate(path_len);
        Ok(())
    }

    fn check_wide_ptr_meta(
        &mut self,
        meta: Option<Scalar<M::PointerTag>>,
        pointee: TyLayout<'tcx>,
    ) -> InterpResult<'tcx> {
        let tail = self.ecx.tcx.struct_tail_erasing_lifetimes(pointee.ty, self.ecx.param_env);
        match tail.kind {
            ty::Dynamic(..) => {
                let vtable = meta.unwrap();
                try_validation!(
                    self.ecx.memory.check_ptr_access(
                        vtable,
                        3 * self.ecx.tcx.data_layout.pointer_size, // drop, size, align
                        self.ecx.tcx.data_layout.pointer_align.abi,
                    ),
                    "dangling or unaligned vtable pointer in wide pointer or too small vtable",
                    self.path
                );
                try_validation!(
                    self.ecx.read_drop_type_from_vtable(vtable),
                    "invalid drop fn in vtable",
                    self.path
                );
                try_validation!(
                    self.ecx.read_size_and_align_from_vtable(vtable),
                    "invalid size or align in vtable",
                    self.path
                );
                // FIXME: More checks for the vtable.
            }
            ty::Slice(..) | ty::Str => {
                let _len = try_validation!(
                    meta.unwrap().to_machine_usize(self.ecx),
                    "non-integer slice length in wide pointer",
                    self.path
                );
                // We do not check that `len * elem_size <= isize::MAX`:
                // that is only required for references, and there it falls out of the
                // "dereferenceable" check performed by Stacked Borrows.
            }
            ty::Foreign(..) => {
                // Unsized, but not wide.
            }
            _ => bug!("Unexpected unsized type tail: {:?}", tail),
        }

        Ok(())
    }
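
    // For orientation (informal summary of the cases above): the metadata of a wide pointer
    // is a vtable pointer for `&dyn Trait` (checked to be aligned, to span at least the
    // drop/size/align entries, and to yield readable values for them), a `usize` length for
    // `&[T]`/`&str`, and absent for `extern type` tails, which are unsized but thin.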
}

impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
    for ValidityVisitor<'rt, 'mir, 'tcx, M>
{
    type V = OpTy<'tcx, M::PointerTag>;

    #[inline(always)]
    fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> {
        &self.ecx
    }

    #[inline]
    fn visit_field(
        &mut self,
        old_op: OpTy<'tcx, M::PointerTag>,
        field: usize,
        new_op: OpTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx> {
        let elem = self.aggregate_field_path_elem(old_op.layout, field);
        self.visit_elem(new_op, elem)
    }

    #[inline]
    fn visit_variant(
        &mut self,
        old_op: OpTy<'tcx, M::PointerTag>,
        variant_id: VariantIdx,
        new_op: OpTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx> {
        let name = match old_op.layout.ty.kind {
            ty::Adt(adt, _) => PathElem::Variant(adt.variants[variant_id].ident.name),
            // Generators also have variants
            ty::Generator(..) => PathElem::GeneratorState(variant_id),
            _ => bug!("Unexpected type with variant: {:?}", old_op.layout.ty),
        };
        self.visit_elem(new_op, name)
    }

    #[inline]
    fn visit_value(&mut self, op: OpTy<'tcx, M::PointerTag>) -> InterpResult<'tcx> {
        trace!("visit_value: {:?}, {:?}", *op, op.layout);
        // Translate some possible errors to something nicer.
        match self.walk_value(op) {
            Ok(()) => Ok(()),
            Err(err) => match err.kind {
                err_ub!(InvalidDiscriminant(val)) => {
                    throw_validation_failure!(val, self.path, "a valid enum discriminant")
                }
                err_unsup!(ReadPointerAsBytes) => {
                    throw_validation_failure!("a pointer", self.path, "plain (non-pointer) bytes")
                }
                _ => Err(err),
            },
        }
    }

    fn visit_primitive(&mut self, value: OpTy<'tcx, M::PointerTag>) -> InterpResult<'tcx> {
        let value = self.ecx.read_immediate(value)?;
        // Go over all the primitive types
        let ty = value.layout.ty;
        match ty.kind {
            ty::Bool => {
                let value = value.to_scalar_or_undef();
                try_validation!(value.to_bool(), value, self.path, "a boolean");
            }
            ty::Char => {
                let value = value.to_scalar_or_undef();
                try_validation!(value.to_char(), value, self.path, "a valid unicode codepoint");
            }
            ty::Float(_) | ty::Int(_) | ty::Uint(_) => {
                // NOTE: Keep this in sync with the array optimization for int/float
                // types below!
                let size = value.layout.size;
                let value = value.to_scalar_or_undef();
                if self.ref_tracking_for_consts.is_some() {
                    // Integers/floats in CTFE: Must be scalar bits, pointers are dangerous
                    try_validation!(
                        value.to_bits(size),
                        value,
                        self.path,
                        "initialized plain (non-pointer) bytes"
                    );
                } else {
                    // At run-time, for now, we accept *anything* for these types, including
                    // undef. We should fix that, but let's start low.
                }
            }
            ty::RawPtr(..) => {
                // We are conservative with undef for integers, but try to
                // actually enforce our current rules for raw pointers.
                let place =
                    try_validation!(self.ecx.ref_to_mplace(value), "undefined pointer", self.path);
                if place.layout.is_unsized() {
                    self.check_wide_ptr_meta(place.meta, place.layout)?;
                }
            }
            _ if ty.is_box() || ty.is_region_ptr() => {
                // Handle wide pointers.
                // Check metadata early, for better diagnostics
                let place =
                    try_validation!(self.ecx.ref_to_mplace(value), "undefined pointer", self.path);
                if place.layout.is_unsized() {
                    self.check_wide_ptr_meta(place.meta, place.layout)?;
                }
                // Make sure this is dereferenceable and all.
                let (size, align) = self
                    .ecx
                    .size_and_align_of(place.meta, place.layout)?
                    // for the purpose of validity, consider foreign types to have
                    // alignment and size determined by the layout (size will be 0,
                    // alignment should take attributes into account).
                    .unwrap_or_else(|| (place.layout.size, place.layout.align.abi));
                let ptr: Option<_> = match self.ecx.memory.check_ptr_access_align(
                    place.ptr,
                    size,
                    Some(align),
                    CheckInAllocMsg::InboundsTest,
                ) {
                    Ok(ptr) => ptr,
                    Err(err) => {
                        info!(
                            "{:?} did not pass access check for size {:?}, align {:?}",
                            place.ptr, size, align
                        );
                        match err.kind {
                            err_unsup!(InvalidNullPointerUsage) => {
                                throw_validation_failure!("NULL reference", self.path)
                            }
                            err_unsup!(AlignmentCheckFailed { required, has }) => {
                                throw_validation_failure!(
                                    format_args!(
                                        "unaligned reference \
                                        (required {} byte alignment but found {})",
                                        required.bytes(),
                                        has.bytes()
                                    ),
                                    self.path
                                )
                            }
                            err_unsup!(ReadBytesAsPointer) => throw_validation_failure!(
                                "dangling reference (created from integer)",
                                self.path
                            ),
                            _ => throw_validation_failure!(
                                "dangling reference (not entirely in bounds)",
                                self.path
                            ),
                        }
                    }
                };
                // Recursive checking
                if let Some(ref mut ref_tracking) = self.ref_tracking_for_consts {
                    if let Some(ptr) = ptr {
                        // not a ZST
                        // Skip validation entirely for some external statics
                        let alloc_kind = self.ecx.tcx.alloc_map.lock().get(ptr.alloc_id);
                        if let Some(GlobalAlloc::Static(did)) = alloc_kind {
                            // `extern static`s cannot be validated as they have no body.
                            // FIXME: Statics from other crates are also skipped.
                            // They might be checked at a different type, but for now we
                            // want to avoid recursing too deeply. This is not sound!
                            if !did.is_local() || self.ecx.tcx.is_foreign_item(did) {
                                return Ok(());
                            }
                        }
                    }
                    // Proceed recursively even for ZST, no reason to skip them!
                    // `!` is a ZST and we want to validate it.
                    // Normalize before handing `place` to tracking because that will
                    // check for duplicates.
                    let place = if size.bytes() > 0 {
                        self.ecx.force_mplace_ptr(place).expect("we already bounds-checked")
                    } else {
                        place
                    };
                    let path = &self.path;
                    ref_tracking.track(place, || {
                        // We need to clone the path anyway, make sure it gets created
                        // with enough space for the additional `Deref`.
                        let mut new_path = Vec::with_capacity(path.len() + 1);
                        new_path.clone_from(path);
                        new_path.push(PathElem::Deref);
                        new_path
                    });
                }
            }
            ty::FnPtr(_sig) => {
                let value = value.to_scalar_or_undef();
                let _fn = try_validation!(
                    value.not_undef().and_then(|ptr| self.ecx.memory.get_fn(ptr)),
                    value,
                    self.path,
                    "a function pointer"
                );
                // FIXME: Check if the signature matches
            }
            // This should be all the primitive types
            _ => bug!("Unexpected primitive type {}", value.layout.ty),
        }
        Ok(())
    }

    fn visit_uninhabited(&mut self) -> InterpResult<'tcx> {
        throw_validation_failure!("a value of an uninhabited type", self.path)
    }

    fn visit_scalar(
        &mut self,
        op: OpTy<'tcx, M::PointerTag>,
        layout: &layout::Scalar,
    ) -> InterpResult<'tcx> {
        let value = self.ecx.read_scalar(op)?;
        // Determine the allowed range
        let (lo, hi) = layout.valid_range.clone().into_inner();
        // `max_hi` is the largest value that fits in the scalar's size
        let max_hi = u128::max_value() >> (128 - op.layout.size.bits());
        assert!(hi <= max_hi);
        // We could also write `(hi + 1) % (max_hi + 1) == lo` but `max_hi + 1` overflows for `u128`
        if (lo == 0 && hi == max_hi) || (hi + 1 == lo) {
            // Nothing to check
            return Ok(());
        }
        // At least one value is excluded. Get the bits.
        let value = try_validation!(
            value.not_undef(),
            value,
            self.path,
            format_args!("something {}", wrapping_range_format(&layout.valid_range, max_hi),)
        );
        let bits = match value.to_bits_or_ptr(op.layout.size, self.ecx) {
            Err(ptr) => {
                if lo == 1 && hi == max_hi {
                    // Only NULL is the niche. So make sure the ptr is NOT NULL.
                    if self.ecx.memory.ptr_may_be_null(ptr) {
                        throw_validation_failure!(
                            "a potentially NULL pointer",
                            self.path,
                            format_args!(
                                "something that cannot possibly fail to be {}",
                                wrapping_range_format(&layout.valid_range, max_hi)
                            )
                        )
                    }
                    return Ok(());
                } else {
                    // Conservatively, we reject, because the pointer *could* have a bad
                    // value.
                    throw_validation_failure!(
                        "a pointer",
                        self.path,
                        format_args!(
                            "something that cannot possibly fail to be {}",
                            wrapping_range_format(&layout.valid_range, max_hi)
                        )
                    )
                }
            }
            Ok(data) => data,
        };
        // Now compare. This is slightly subtle because this is a special "wrap-around" range.
        if wrapping_range_contains(&layout.valid_range, bits) {
            Ok(())
        } else {
            throw_validation_failure!(
                bits,
                self.path,
                format_args!("something {}", wrapping_range_format(&layout.valid_range, max_hi))
            )
        }
    }
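
    // Worked example (informal): for `bool` the scalar's valid range is 0..=1 while `max_hi`
    // for a 1-byte scalar is 255, so any other bit pattern fails with "expected something
    // less or equal to 1". For the NULL niche of a reference (valid range `1..=max_hi`),
    // an undef or possibly-NULL value is rejected, while any other pointer is accepted
    // because only 0 is excluded.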

    fn visit_aggregate(
        &mut self,
        op: OpTy<'tcx, M::PointerTag>,
        fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
    ) -> InterpResult<'tcx> {
        match op.layout.ty.kind {
            ty::Str => {
                let mplace = op.assert_mem_place(); // strings are never immediate
                try_validation!(
                    self.ecx.read_str(mplace),
                    "uninitialized or non-UTF-8 data in str",
                    self.path
                );
            }
            ty::Array(tys, ..) | ty::Slice(tys)
                if {
                    // This optimization applies for types that can hold arbitrary bytes (such as
                    // integer and floating point types) or for structs or tuples with no fields.
                    // FIXME(wesleywiser) This logic could be extended further to arbitrary structs
                    // or tuples made up of integer/floating point types or inhabited ZSTs with no
                    // padding.
                    match tys.kind {
                        ty::Int(..) | ty::Uint(..) | ty::Float(..) => true,
                        ty::Tuple(tys) if tys.len() == 0 => true,
                        ty::Adt(adt_def, _)
                            if adt_def.is_struct() && adt_def.all_fields().next().is_none() =>
                        {
                            true
                        }
                        _ => false,
                    }
                } =>
            {
                // Optimized handling for arrays of integer/float type.

                // Bailing out for ZSTs is ok, since the array element type can only be int/float.
                if op.layout.is_zst() {
                    return Ok(());
                }
                // A non-ZST array cannot be immediate, and slices are never immediate.
                let mplace = op.assert_mem_place();
                // This is the length of the array/slice.
                let len = mplace.len(self.ecx)?;
                // Zero-length slices have nothing to be checked.
                if len == 0 {
                    return Ok(());
                }
                // This is the element type size.
                let ty_size = self.ecx.layout_of(tys)?.size;
                // This is the size in bytes of the whole array.
                let size = ty_size * len;
                // Size is not 0, get a pointer.
                let ptr = self.ecx.force_ptr(mplace.ptr)?;

                // Optimization: we just check the entire range at once.
                // NOTE: Keep this in sync with the handling of integer and float
                // types above, in `visit_primitive`.
                // In run-time mode, we accept pointers in here. This is actually more
                // permissive than a per-element check would be, e.g., we accept
                // an &[u8] that contains a pointer even though bytewise checking would
                // reject it. However, that's good: We don't inherently want
                // to reject those pointers, we just do not have the machinery to
                // talk about parts of a pointer.
                // We also accept undef, for consistency with the slow path.
                match self.ecx.memory.get_raw(ptr.alloc_id)?.check_bytes(
                    self.ecx,
                    ptr,
                    size,
                    /*allow_ptr_and_undef*/ self.ref_tracking_for_consts.is_none(),
                ) {
                    // In the happy case, we needn't check anything else.
                    Ok(()) => {}
                    // Some error happened, try to provide a more detailed description.
                    Err(err) => {
                        // For some errors we might be able to provide extra information
                        match err.kind {
                            err_unsup!(ReadUndefBytes(offset)) => {
                                // Some byte was undefined, determine which
                                // element that byte belongs to so we can
                                // provide an index.
                                let i = (offset.bytes() / ty_size.bytes()) as usize;
                                self.path.push(PathElem::ArrayElem(i));

                                throw_validation_failure!("undefined bytes", self.path)
                            }
                            // Other errors shouldn't be possible
                            _ => return Err(err),
                        }
                    }
                }
            }
            _ => {
                self.walk_aggregate(op, fields)? // default handler
            }
        }
        Ok(())
    }
}

impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    /// This function checks the data at `op`. `op` is assumed to cover valid memory if it
    /// is an indirect operand.
    /// It will error if the bits at the destination do not match the ones described by the layout.
    ///
    /// `ref_tracking_for_consts` can be `None` to avoid recursive checking below references.
    /// This also toggles between "run-time" (no recursion) and "compile-time" (with recursion)
    /// validation (e.g., pointer values are fine in integers at runtime) and various other
    /// const-specific validation checks.
    pub fn validate_operand(
        &self,
        op: OpTy<'tcx, M::PointerTag>,
        path: Vec<PathElem>,
        ref_tracking_for_consts: Option<
            &mut RefTracking<MPlaceTy<'tcx, M::PointerTag>, Vec<PathElem>>,
        >,
    ) -> InterpResult<'tcx> {
        trace!("validate_operand: {:?}, {:?}", *op, op.layout.ty);

        // Construct a visitor
        let mut visitor = ValidityVisitor { path, ref_tracking_for_consts, ecx: self };

        // Try to cast to ptr *once* instead of all the time.
        let op = self.force_op_ptr(op).unwrap_or(op);

        // Run it
        visitor.visit_value(op)
    }
}
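
// Informal summary of the two modes selected by `ref_tracking_for_consts` (see also the
// worklist sketch after `RefTracking` above): with `None`, integers/floats may hold undef
// or pointer values and references are not followed; with `Some(..)`, integers/floats must
// be initialized non-pointer bytes and every newly seen reference is queued for recursive
// validation (deduplicated via `seen`, and skipped for non-local or foreign statics).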