3 use rustc
::hir
::def_id
::DefId
;
4 use rustc
::hir
::def
::Def
;
5 use rustc
::hir
::map
::definitions
::DefPathData
;
6 use rustc
::middle
::const_val
::ConstVal
;
8 use rustc
::ty
::layout
::{self, Size, Align, HasDataLayout, IntegerExt, LayoutOf, TyLayout}
;
9 use rustc
::ty
::subst
::{Subst, Substs}
;
10 use rustc
::ty
::{self, Ty, TyCtxt, TypeAndMut}
;
11 use rustc
::ty
::query
::TyCtxtAt
;
12 use rustc_data_structures
::indexed_vec
::{IndexVec, Idx}
;
13 use rustc
::middle
::const_val
::FrameInfo
;
14 use syntax
::codemap
::{self, Span}
;
15 use syntax
::ast
::Mutability
;
16 use rustc
::mir
::interpret
::{
17 GlobalId
, Value
, Scalar
,
18 EvalResult
, EvalErrorKind
, Pointer
, ConstValue
,
22 use super::{Place
, PlaceExtra
, Memory
,
23 HasMemory
, MemoryKind
,
26 pub struct EvalContext
<'a
, 'mir
, 'tcx
: 'a
+ 'mir
, M
: Machine
<'mir
, 'tcx
>> {
27 /// Stores the `Machine` instance.
30 /// The results of the type checker, from rustc.
31 pub tcx
: TyCtxtAt
<'a
, 'tcx
, 'tcx
>,
33 /// Bounds in scope for polymorphic evaluations.
34 pub param_env
: ty
::ParamEnv
<'tcx
>,
36 /// The virtual memory system.
37 pub memory
: Memory
<'a
, 'mir
, 'tcx
, M
>,
39 /// The virtual call stack.
40 pub(crate) stack
: Vec
<Frame
<'mir
, 'tcx
>>,
42 /// The maximum number of stack frames allowed
43 pub(crate) stack_limit
: usize,
45 /// The maximum number of terminators that may be evaluated.
46 /// This prevents infinite loops and huge computations from freezing up const eval.
47 /// Remove once halting problem is solved.
48 pub(crate) terminators_remaining
: usize,
52 pub struct Frame
<'mir
, 'tcx
: 'mir
> {
53 ////////////////////////////////////////////////////////////////////////////////
54 // Function and callsite information
55 ////////////////////////////////////////////////////////////////////////////////
56 /// The MIR for the function called on this frame.
57 pub mir
: &'mir mir
::Mir
<'tcx
>,
59 /// The def_id and substs of the current function
60 pub instance
: ty
::Instance
<'tcx
>,
62 /// The span of the call site.
63 pub span
: codemap
::Span
,
65 ////////////////////////////////////////////////////////////////////////////////
66 // Return place and locals
67 ////////////////////////////////////////////////////////////////////////////////
68 /// The block to return to when returning from the current stack frame
69 pub return_to_block
: StackPopCleanup
,
71 /// The location where the result of the current stack frame should be written to.
72 pub return_place
: Place
,
74 /// The list of locals for this stack frame, stored in order as
75 /// `[return_ptr, arguments..., variables..., temporaries...]`. The locals are stored as `Option<Value>`s.
76 /// `None` represents a local that is currently dead, while a live local
77 /// can either directly contain `Scalar` or refer to some part of an `Allocation`.
79 /// Before being initialized, arguments are `Value::Scalar(Scalar::undef())` and other locals are `None`.
80 pub locals
: IndexVec
<mir
::Local
, Option
<Value
>>,
82 ////////////////////////////////////////////////////////////////////////////////
83 // Current position within the function
84 ////////////////////////////////////////////////////////////////////////////////
85 /// The block that is currently executed (or will be executed after the above call stacks
87 pub block
: mir
::BasicBlock
,
89 /// The index of the currently evaluated statement.
93 #[derive(Clone, Debug, Eq, PartialEq, Hash)]
94 pub enum StackPopCleanup
{
95 /// The stackframe existed to compute the initial value of a static/constant, make sure it
96 /// isn't modifyable afterwards in case of constants.
97 /// In case of `static mut`, mark the memory to ensure it's never marked as immutable through
98 /// references or deallocated
99 MarkStatic(Mutability
),
100 /// A regular stackframe added due to a function call will need to get forwarded to the next
102 Goto(mir
::BasicBlock
),
103 /// The main function and diverging functions have nowhere to return to
107 #[derive(Copy, Clone, Debug)]
108 pub struct TyAndPacked
<'tcx
> {
113 #[derive(Copy, Clone, Debug)]
114 pub struct ValTy
<'tcx
> {
119 impl<'tcx
> ::std
::ops
::Deref
for ValTy
<'tcx
> {
121 fn deref(&self) -> &Value
{
126 impl<'a
, 'mir
, 'tcx
, M
: Machine
<'mir
, 'tcx
>> HasDataLayout
for &'a EvalContext
<'a
, 'mir
, 'tcx
, M
> {
128 fn data_layout(&self) -> &layout
::TargetDataLayout
{
129 &self.tcx
.data_layout
133 impl<'c
, 'b
, 'a
, 'mir
, 'tcx
, M
: Machine
<'mir
, 'tcx
>> HasDataLayout
134 for &'c
&'b
mut EvalContext
<'a
, 'mir
, 'tcx
, M
> {
136 fn data_layout(&self) -> &layout
::TargetDataLayout
{
137 &self.tcx
.data_layout
141 impl<'a
, 'mir
, 'tcx
, M
: Machine
<'mir
, 'tcx
>> layout
::HasTyCtxt
<'tcx
> for &'a EvalContext
<'a
, 'mir
, 'tcx
, M
> {
143 fn tcx
<'b
>(&'b
self) -> TyCtxt
<'b
, 'tcx
, 'tcx
> {
148 impl<'c
, 'b
, 'a
, 'mir
, 'tcx
, M
: Machine
<'mir
, 'tcx
>> layout
::HasTyCtxt
<'tcx
>
149 for &'c
&'b
mut EvalContext
<'a
, 'mir
, 'tcx
, M
> {
151 fn tcx
<'d
>(&'d
self) -> TyCtxt
<'d
, 'tcx
, 'tcx
> {
156 impl<'a
, 'mir
, 'tcx
, M
: Machine
<'mir
, 'tcx
>> LayoutOf
for &'a EvalContext
<'a
, 'mir
, 'tcx
, M
> {
158 type TyLayout
= EvalResult
<'tcx
, TyLayout
<'tcx
>>;
160 fn layout_of(self, ty
: Ty
<'tcx
>) -> Self::TyLayout
{
161 self.tcx
.layout_of(self.param_env
.and(ty
))
162 .map_err(|layout
| EvalErrorKind
::Layout(layout
).into())
166 impl<'c
, 'b
, 'a
, 'mir
, 'tcx
, M
: Machine
<'mir
, 'tcx
>> LayoutOf
167 for &'c
&'b
mut EvalContext
<'a
, 'mir
, 'tcx
, M
> {
169 type TyLayout
= EvalResult
<'tcx
, TyLayout
<'tcx
>>;
172 fn layout_of(self, ty
: Ty
<'tcx
>) -> Self::TyLayout
{
173 (&**self).layout_of(ty
)
177 const MAX_TERMINATORS
: usize = 1_000_000;
179 impl<'a
, 'mir
, 'tcx
: 'mir
, M
: Machine
<'mir
, 'tcx
>> EvalContext
<'a
, 'mir
, 'tcx
, M
> {
181 tcx
: TyCtxtAt
<'a
, 'tcx
, 'tcx
>,
182 param_env
: ty
::ParamEnv
<'tcx
>,
184 memory_data
: M
::MemoryData
,
190 memory
: Memory
::new(tcx
, memory_data
),
192 stack_limit
: tcx
.sess
.const_eval_stack_frame_limit
,
193 terminators_remaining
: MAX_TERMINATORS
,
197 pub(crate) fn with_fresh_body
<F
: FnOnce(&mut Self) -> R
, R
>(&mut self, f
: F
) -> R
{
198 let stack
= mem
::replace(&mut self.stack
, Vec
::new());
199 let terminators_remaining
= mem
::replace(&mut self.terminators_remaining
, MAX_TERMINATORS
);
202 self.terminators_remaining
= terminators_remaining
;
206 pub fn alloc_ptr(&mut self, ty
: Ty
<'tcx
>) -> EvalResult
<'tcx
, Pointer
> {
207 let layout
= self.layout_of(ty
)?
;
208 assert
!(!layout
.is_unsized(), "cannot alloc memory for unsized type");
210 self.memory
.allocate(layout
.size
, layout
.align
, Some(MemoryKind
::Stack
))
213 pub fn memory(&self) -> &Memory
<'a
, 'mir
, 'tcx
, M
> {
217 pub fn memory_mut(&mut self) -> &mut Memory
<'a
, 'mir
, 'tcx
, M
> {
221 pub fn stack(&self) -> &[Frame
<'mir
, 'tcx
>] {
226 pub fn cur_frame(&self) -> usize {
227 assert
!(self.stack
.len() > 0);
231 pub fn str_to_value(&mut self, s
: &str) -> EvalResult
<'tcx
, Value
> {
232 let ptr
= self.memory
.allocate_bytes(s
.as_bytes());
233 Ok(Scalar
::Ptr(ptr
).to_value_with_len(s
.len() as u64, self.tcx
.tcx
))
236 pub fn const_value_to_value(
238 val
: ConstValue
<'tcx
>,
240 ) -> EvalResult
<'tcx
, Value
> {
242 ConstValue
::ByRef(alloc
, offset
) => {
243 // FIXME: Allocate new AllocId for all constants inside
244 let id
= self.memory
.allocate_value(alloc
.clone(), Some(MemoryKind
::Stack
))?
;
245 Ok(Value
::ByRef(Pointer
::new(id
, offset
).into(), alloc
.align
))
247 ConstValue
::ScalarPair(a
, b
) => Ok(Value
::ScalarPair(a
, b
)),
248 ConstValue
::Scalar(val
) => Ok(Value
::Scalar(val
)),
252 pub(super) fn const_to_value(
254 const_val
: &ConstVal
<'tcx
>,
256 ) -> EvalResult
<'tcx
, Value
> {
258 ConstVal
::Unevaluated(def_id
, substs
) => {
259 let instance
= self.resolve(def_id
, substs
)?
;
260 self.read_global_as_value(GlobalId
{
265 ConstVal
::Value(val
) => self.const_value_to_value(val
, ty
)
269 pub(super) fn resolve(&self, def_id
: DefId
, substs
: &'tcx Substs
<'tcx
>) -> EvalResult
<'tcx
, ty
::Instance
<'tcx
>> {
270 trace
!("resolve: {:?}, {:#?}", def_id
, substs
);
271 trace
!("substs: {:#?}", self.substs());
272 trace
!("param_env: {:#?}", self.param_env
);
273 let substs
= self.tcx
.subst_and_normalize_erasing_regions(
278 ty
::Instance
::resolve(
283 ).ok_or_else(|| EvalErrorKind
::TypeckError
.into()) // turn error prop into a panic to expose associated type in const issue
286 pub(super) fn type_is_sized(&self, ty
: Ty
<'tcx
>) -> bool
{
287 ty
.is_sized(self.tcx
, self.param_env
)
292 instance
: ty
::InstanceDef
<'tcx
>,
293 ) -> EvalResult
<'tcx
, &'tcx mir
::Mir
<'tcx
>> {
294 // do not continue if typeck errors occurred (can only occur in local crate)
295 let did
= instance
.def_id();
296 if did
.is_local() && self.tcx
.has_typeck_tables(did
) && self.tcx
.typeck_tables_of(did
).tainted_by_errors
{
297 return err
!(TypeckError
);
299 trace
!("load mir {:?}", instance
);
301 ty
::InstanceDef
::Item(def_id
) => {
302 self.tcx
.maybe_optimized_mir(def_id
).ok_or_else(||
303 EvalErrorKind
::NoMirFor(self.tcx
.item_path_str(def_id
)).into()
306 _
=> Ok(self.tcx
.instance_mir(instance
)),
310 pub fn monomorphize(&self, ty
: Ty
<'tcx
>, substs
: &'tcx Substs
<'tcx
>) -> Ty
<'tcx
> {
311 // miri doesn't care about lifetimes, and will choke on some crazy ones
312 // let's simply get rid of them
313 let substituted
= ty
.subst(*self.tcx
, substs
);
314 self.tcx
.normalize_erasing_regions(ty
::ParamEnv
::reveal_all(), substituted
)
317 /// Return the size and aligment of the value at the given type.
318 /// Note that the value does not matter if the type is sized. For unsized types,
319 /// the value has to be a fat pointer, and we only care about the "extra" data in it.
320 pub fn size_and_align_of_dst(
324 ) -> EvalResult
<'tcx
, (Size
, Align
)> {
325 let layout
= self.layout_of(ty
)?
;
326 if !layout
.is_unsized() {
327 Ok(layout
.size_and_align())
330 ty
::TyAdt(..) | ty
::TyTuple(..) => {
331 // First get the size of all statically known fields.
332 // Don't use type_of::sizing_type_of because that expects t to be sized,
333 // and it also rounds up to alignment, which we want to avoid,
334 // as the unsized field's alignment could be smaller.
335 assert
!(!ty
.is_simd());
336 debug
!("DST {} layout: {:?}", ty
, layout
);
338 let sized_size
= layout
.fields
.offset(layout
.fields
.count() - 1);
339 let sized_align
= layout
.align
;
341 "DST {} statically sized prefix size: {:?} align: {:?}",
347 // Recurse to get the size of the dynamically sized field (must be
349 let field_ty
= layout
.field(&self, layout
.fields
.count() - 1)?
.ty
;
350 let (unsized_size
, unsized_align
) =
351 self.size_and_align_of_dst(field_ty
, value
)?
;
353 // FIXME (#26403, #27023): We should be adding padding
354 // to `sized_size` (to accommodate the `unsized_align`
355 // required of the unsized field that follows) before
356 // summing it with `sized_size`. (Note that since #26403
357 // is unfixed, we do not yet add the necessary padding
358 // here. But this is where the add would go.)
360 // Return the sum of sizes and max of aligns.
361 let size
= sized_size
+ unsized_size
;
363 // Choose max of two known alignments (combined value must
364 // be aligned according to more restrictive of the two).
365 let align
= sized_align
.max(unsized_align
);
367 // Issue #27023: must add any necessary padding to `size`
368 // (to make it a multiple of `align`) before returning it.
370 // Namely, the returned size should be, in C notation:
372 // `size + ((size & (align-1)) ? align : 0)`
374 // emulated via the semi-standard fast bit trick:
376 // `(size + (align-1)) & -align`
378 Ok((size
.abi_align(align
), align
))
380 ty
::TyDynamic(..) => {
381 let (_
, vtable
) = self.into_ptr_vtable_pair(value
)?
;
382 // the second entry in the vtable is the dynamic size of the object.
383 self.read_size_and_align_from_vtable(vtable
)
386 ty
::TySlice(_
) | ty
::TyStr
=> {
387 let (elem_size
, align
) = layout
.field(&self, 0)?
.size_and_align();
388 let (_
, len
) = self.into_slice(value
)?
;
389 Ok((elem_size
* len
, align
))
392 _
=> bug
!("size_of_val::<{:?}>", ty
),
397 pub fn push_stack_frame(
399 instance
: ty
::Instance
<'tcx
>,
401 mir
: &'mir mir
::Mir
<'tcx
>,
403 return_to_block
: StackPopCleanup
,
404 ) -> EvalResult
<'tcx
> {
405 ::log_settings
::settings().indentation
+= 1;
407 let locals
= if mir
.local_decls
.len() > 1 {
408 let mut locals
= IndexVec
::from_elem(Some(Value
::Scalar(Scalar
::undef())), &mir
.local_decls
);
409 match self.tcx
.describe_def(instance
.def_id()) {
410 // statics and constants don't have `Storage*` statements, no need to look for them
411 Some(Def
::Static(..)) | Some(Def
::Const(..)) | Some(Def
::AssociatedConst(..)) => {}
,
413 trace
!("push_stack_frame: {:?}: num_bbs: {}", span
, mir
.basic_blocks().len());
414 for block
in mir
.basic_blocks() {
415 for stmt
in block
.statements
.iter() {
416 use rustc
::mir
::StatementKind
::{StorageDead, StorageLive}
;
419 StorageDead(local
) => locals
[local
] = None
,
428 // don't allocate at all for trivial constants
432 self.stack
.push(Frame
{
434 block
: mir
::START_BLOCK
,
443 self.memory
.cur_frame
= self.cur_frame();
445 if self.stack
.len() > self.stack_limit
{
446 err
!(StackFrameLimitReached
)
452 pub(super) fn pop_stack_frame(&mut self) -> EvalResult
<'tcx
> {
453 ::log_settings
::settings().indentation
-= 1;
454 M
::end_region(self, None
)?
;
455 let frame
= self.stack
.pop().expect(
456 "tried to pop a stack frame, but there were none",
458 if !self.stack
.is_empty() {
459 // TODO: Is this the correct time to start considering these accesses as originating from the returned-to stack frame?
460 self.memory
.cur_frame
= self.cur_frame();
462 match frame
.return_to_block
{
463 StackPopCleanup
::MarkStatic(mutable
) => {
464 if let Place
::Ptr { ptr, .. }
= frame
.return_place
{
465 // FIXME: to_ptr()? might be too extreme here, static zsts might reach this under certain conditions
466 self.memory
.mark_static_initialized(
467 ptr
.to_ptr()?
.alloc_id
,
471 bug
!("StackPopCleanup::MarkStatic on: {:?}", frame
.return_place
);
474 StackPopCleanup
::Goto(target
) => self.goto_block(target
),
475 StackPopCleanup
::None
=> {}
477 // deallocate all locals that are backed by an allocation
478 for local
in frame
.locals
{
479 self.deallocate_local(local
)?
;
485 pub fn deallocate_local(&mut self, local
: Option
<Value
>) -> EvalResult
<'tcx
> {
486 if let Some(Value
::ByRef(ptr
, _align
)) = local
{
487 trace
!("deallocating local");
488 let ptr
= ptr
.to_ptr()?
;
489 self.memory
.dump_alloc(ptr
.alloc_id
);
490 self.memory
.deallocate_local(ptr
)?
;
495 /// Evaluate an assignment statement.
497 /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
498 /// type writes its results directly into the memory specified by the place.
499 pub(super) fn eval_rvalue_into_place(
501 rvalue
: &mir
::Rvalue
<'tcx
>,
502 place
: &mir
::Place
<'tcx
>,
503 ) -> EvalResult
<'tcx
> {
504 let dest
= self.eval_place(place
)?
;
505 let dest_ty
= self.place_ty(place
);
507 use rustc
::mir
::Rvalue
::*;
509 Use(ref operand
) => {
510 let value
= self.eval_operand(operand
)?
.value
;
515 self.write_value(valty
, dest
)?
;
518 BinaryOp(bin_op
, ref left
, ref right
) => {
519 let left
= self.eval_operand(left
)?
;
520 let right
= self.eval_operand(right
)?
;
521 self.intrinsic_overflowing(
530 CheckedBinaryOp(bin_op
, ref left
, ref right
) => {
531 let left
= self.eval_operand(left
)?
;
532 let right
= self.eval_operand(right
)?
;
533 self.intrinsic_with_overflow(
542 UnaryOp(un_op
, ref operand
) => {
543 let val
= self.eval_operand_to_scalar(operand
)?
;
544 let val
= self.unary_op(un_op
, val
, dest_ty
)?
;
552 Aggregate(ref kind
, ref operands
) => {
553 self.inc_step_counter_and_check_limit(operands
.len());
555 let (dest
, active_field_index
) = match **kind
{
556 mir
::AggregateKind
::Adt(adt_def
, variant_index
, _
, active_field_index
) => {
557 self.write_discriminant_value(dest_ty
, dest
, variant_index
)?
;
558 if adt_def
.is_enum() {
559 (self.place_downcast(dest
, variant_index
)?
, active_field_index
)
561 (dest
, active_field_index
)
567 let layout
= self.layout_of(dest_ty
)?
;
568 for (i
, operand
) in operands
.iter().enumerate() {
569 let value
= self.eval_operand(operand
)?
;
570 // Ignore zero-sized fields.
571 if !self.layout_of(value
.ty
)?
.is_zst() {
572 let field_index
= active_field_index
.unwrap_or(i
);
573 let (field_dest
, _
) = self.place_field(dest
, mir
::Field
::new(field_index
), layout
)?
;
574 self.write_value(value
, field_dest
)?
;
579 Repeat(ref operand
, _
) => {
580 let (elem_ty
, length
) = match dest_ty
.sty
{
581 ty
::TyArray(elem_ty
, n
) => (elem_ty
, n
.unwrap_usize(self.tcx
.tcx
)),
584 "tried to assign array-repeat to non-array type {:?}",
589 let elem_size
= self.layout_of(elem_ty
)?
.size
;
590 let value
= self.eval_operand(operand
)?
.value
;
592 let (dest
, dest_align
) = self.force_allocation(dest
)?
.to_ptr_align();
594 // FIXME: speed up repeat filling
596 let elem_dest
= dest
.ptr_offset(elem_size
* i
as u64, &self)?
;
597 self.write_value_to_ptr(value
, elem_dest
, dest_align
, elem_ty
)?
;
602 // FIXME(CTFE): don't allow computing the length of arrays in const eval
603 let src
= self.eval_place(place
)?
;
604 let ty
= self.place_ty(place
);
605 let (_
, len
) = src
.elem_ty_and_len(ty
, self.tcx
.tcx
);
606 let defined
= self.memory
.pointer_size().bits() as u8;
617 Ref(_
, _
, ref place
) => {
618 let src
= self.eval_place(place
)?
;
619 // We ignore the alignment of the place here -- special handling for packed structs ends
620 // at the `&` operator.
621 let (ptr
, _align
, extra
) = self.force_allocation(src
)?
.to_ptr_align_extra();
623 let val
= match extra
{
624 PlaceExtra
::None
=> ptr
.to_value(),
625 PlaceExtra
::Length(len
) => ptr
.to_value_with_len(len
, self.tcx
.tcx
),
626 PlaceExtra
::Vtable(vtable
) => ptr
.to_value_with_vtable(vtable
),
627 PlaceExtra
::DowncastVariant(..) => {
628 bug
!("attempted to take a reference to an enum downcast place")
635 self.write_value(valty
, dest
)?
;
638 NullaryOp(mir
::NullOp
::Box
, ty
) => {
639 let ty
= self.monomorphize(ty
, self.substs());
640 M
::box_alloc(self, ty
, dest
)?
;
643 NullaryOp(mir
::NullOp
::SizeOf
, ty
) => {
644 let ty
= self.monomorphize(ty
, self.substs());
645 let layout
= self.layout_of(ty
)?
;
646 assert
!(!layout
.is_unsized(),
647 "SizeOf nullary MIR operator called for unsized type");
648 let defined
= self.memory
.pointer_size().bits() as u8;
652 bits
: layout
.size
.bytes() as u128
,
659 Cast(kind
, ref operand
, cast_ty
) => {
660 debug_assert_eq
!(self.monomorphize(cast_ty
, self.substs()), dest_ty
);
661 use rustc
::mir
::CastKind
::*;
664 let src
= self.eval_operand(operand
)?
;
665 let src_layout
= self.layout_of(src
.ty
)?
;
666 let dst_layout
= self.layout_of(dest_ty
)?
;
667 self.unsize_into(src
.value
, src_layout
, dest
, dst_layout
)?
;
671 let src
= self.eval_operand(operand
)?
;
672 if self.type_is_fat_ptr(src
.ty
) {
673 match (src
.value
, self.type_is_fat_ptr(dest_ty
)) {
674 (Value
::ByRef { .. }
, _
) |
675 // pointers to extern types
676 (Value
::Scalar(_
),_
) |
677 // slices and trait objects to other slices/trait objects
678 (Value
::ScalarPair(..), true) => {
683 self.write_value(valty
, dest
)?
;
685 // slices and trait objects to thin pointers (dropping the metadata)
686 (Value
::ScalarPair(data
, _
), false) => {
688 value
: Value
::Scalar(data
),
691 self.write_value(valty
, dest
)?
;
695 let src_layout
= self.layout_of(src
.ty
)?
;
696 match src_layout
.variants
{
697 layout
::Variants
::Single { index }
=> {
698 if let Some(def
) = src
.ty
.ty_adt_def() {
700 .discriminant_for_variant(*self.tcx
, index
)
707 return self.write_scalar(
716 layout
::Variants
::Tagged { .. }
|
717 layout
::Variants
::NicheFilling { .. }
=> {}
,
720 let src_val
= self.value_to_scalar(src
)?
;
721 let dest_val
= self.cast_scalar(src_val
, src
.ty
, dest_ty
)?
;
723 value
: Value
::Scalar(dest_val
),
726 self.write_value(valty
, dest
)?
;
731 match self.eval_operand(operand
)?
.ty
.sty
{
732 ty
::TyFnDef(def_id
, substs
) => {
733 if self.tcx
.has_attr(def_id
, "rustc_args_required_const") {
734 bug
!("reifying a fn ptr that requires \
737 let instance
: EvalResult
<'tcx
, _
> = ty
::Instance
::resolve(
742 ).ok_or_else(|| EvalErrorKind
::TypeckError
.into());
743 let fn_ptr
= self.memory
.create_fn_alloc(instance?
);
745 value
: Value
::Scalar(fn_ptr
.into()),
748 self.write_value(valty
, dest
)?
;
750 ref other
=> bug
!("reify fn pointer on {:?}", other
),
757 let mut src
= self.eval_operand(operand
)?
;
759 self.write_value(src
, dest
)?
;
761 ref other
=> bug
!("fn to unsafe fn cast on {:?}", other
),
765 ClosureFnPointer
=> {
766 match self.eval_operand(operand
)?
.ty
.sty
{
767 ty
::TyClosure(def_id
, substs
) => {
768 let substs
= self.tcx
.subst_and_normalize_erasing_regions(
770 ty
::ParamEnv
::reveal_all(),
773 let instance
= ty
::Instance
::resolve_closure(
777 ty
::ClosureKind
::FnOnce
,
779 let fn_ptr
= self.memory
.create_fn_alloc(instance
);
781 value
: Value
::Scalar(fn_ptr
.into()),
784 self.write_value(valty
, dest
)?
;
786 ref other
=> bug
!("closure fn pointer on {:?}", other
),
792 Discriminant(ref place
) => {
793 let ty
= self.place_ty(place
);
794 let place
= self.eval_place(place
)?
;
795 let discr_val
= self.read_discriminant_value(place
, ty
)?
;
796 let defined
= self.layout_of(dest_ty
).unwrap().size
.bits() as u8;
797 self.write_scalar(dest
, Scalar
::Bits
{
804 self.dump_local(dest
);
809 pub(super) fn type_is_fat_ptr(&self, ty
: Ty
<'tcx
>) -> bool
{
811 ty
::TyRawPtr(ty
::TypeAndMut { ty, .. }
) |
812 ty
::TyRef(_
, ty
, _
) => !self.type_is_sized(ty
),
813 ty
::TyAdt(def
, _
) if def
.is_box() => !self.type_is_sized(ty
.boxed_ty()),
818 pub(super) fn eval_operand_to_scalar(
820 op
: &mir
::Operand
<'tcx
>,
821 ) -> EvalResult
<'tcx
, Scalar
> {
822 let valty
= self.eval_operand(op
)?
;
823 self.value_to_scalar(valty
)
826 pub(crate) fn operands_to_args(
828 ops
: &[mir
::Operand
<'tcx
>],
829 ) -> EvalResult
<'tcx
, Vec
<ValTy
<'tcx
>>> {
831 .map(|op
| self.eval_operand(op
))
835 pub fn eval_operand(&mut self, op
: &mir
::Operand
<'tcx
>) -> EvalResult
<'tcx
, ValTy
<'tcx
>> {
836 use rustc
::mir
::Operand
::*;
837 let ty
= self.monomorphize(op
.ty(self.mir(), *self.tcx
), self.substs());
839 // FIXME: do some more logic on `move` to invalidate the old location
843 value
: self.eval_and_read_place(place
)?
,
848 Constant(ref constant
) => {
849 use rustc
::mir
::Literal
;
850 let mir
::Constant { ref literal, .. }
= **constant
;
851 let value
= match *literal
{
852 Literal
::Value { ref value }
=> self.const_to_value(&value
.val
, ty
)?
,
854 Literal
::Promoted { index }
=> {
855 let instance
= self.frame().instance
;
856 self.read_global_as_value(GlobalId
{
858 promoted
: Some(index
),
871 /// reads a tag and produces the corresponding variant index
872 pub fn read_discriminant_as_variant_index(
876 ) -> EvalResult
<'tcx
, usize> {
877 let layout
= self.layout_of(ty
)?
;
878 match layout
.variants
{
879 ty
::layout
::Variants
::Single { index }
=> Ok(index
),
880 ty
::layout
::Variants
::Tagged { .. }
=> {
881 let discr_val
= self.read_discriminant_value(place
, ty
)?
;
884 .expect("tagged layout for non adt")
885 .discriminants(self.tcx
.tcx
)
886 .position(|var
| var
.val
== discr_val
)
887 .ok_or_else(|| EvalErrorKind
::InvalidDiscriminant
.into())
889 ty
::layout
::Variants
::NicheFilling { .. }
=> {
890 let discr_val
= self.read_discriminant_value(place
, ty
)?
;
891 assert_eq
!(discr_val
as usize as u128
, discr_val
);
892 Ok(discr_val
as usize)
897 pub fn read_discriminant_value(
901 ) -> EvalResult
<'tcx
, u128
> {
902 let layout
= self.layout_of(ty
)?
;
903 trace
!("read_discriminant_value {:#?}", layout
);
904 if layout
.abi
== layout
::Abi
::Uninhabited
{
908 match layout
.variants
{
909 layout
::Variants
::Single { index }
=> {
910 let discr_val
= ty
.ty_adt_def().map_or(
912 |def
| def
.discriminant_for_variant(*self.tcx
, index
).val
);
913 return Ok(discr_val
);
915 layout
::Variants
::Tagged { .. }
|
916 layout
::Variants
::NicheFilling { .. }
=> {}
,
919 let (discr_place
, discr
) = self.place_field(place
, mir
::Field
::new(0), layout
)?
;
920 trace
!("discr place: {:?}, {:?}", discr_place
, discr
);
921 let raw_discr
= self.value_to_scalar(ValTy
{
922 value
: self.read_place(discr_place
)?
,
925 let discr_val
= match layout
.variants
{
926 layout
::Variants
::Single { .. }
=> bug
!(),
927 // FIXME: should we catch invalid discriminants here?
928 layout
::Variants
::Tagged { .. }
=> {
929 if discr
.ty
.is_signed() {
930 let i
= raw_discr
.to_bits(discr
.size
)?
as i128
;
931 // going from layout tag type to typeck discriminant type
932 // requires first sign extending with the layout discriminant
933 let shift
= 128 - discr
.size
.bits();
934 let sexted
= (i
<< shift
) >> shift
;
935 // and then zeroing with the typeck discriminant type
937 .ty_adt_def().expect("tagged layout corresponds to adt")
940 let discr_ty
= layout
::Integer
::from_attr(self.tcx
.tcx
, discr_ty
);
941 let shift
= 128 - discr_ty
.size().bits();
942 let truncatee
= sexted
as u128
;
943 (truncatee
<< shift
) >> shift
945 raw_discr
.to_bits(discr
.size
)?
948 layout
::Variants
::NicheFilling
{
954 let variants_start
= *niche_variants
.start() as u128
;
955 let variants_end
= *niche_variants
.end() as u128
;
958 assert
!(niche_start
== 0);
959 assert
!(variants_start
== variants_end
);
960 dataful_variant
as u128
962 Scalar
::Bits { bits: raw_discr, defined }
=> {
963 if defined
< discr
.size
.bits() as u8 {
964 return err
!(ReadUndefBytes
);
966 let discr
= raw_discr
.wrapping_sub(niche_start
)
967 .wrapping_add(variants_start
);
968 if variants_start
<= discr
&& discr
<= variants_end
{
971 dataful_variant
as u128
982 pub fn write_discriminant_value(
986 variant_index
: usize,
987 ) -> EvalResult
<'tcx
> {
988 let layout
= self.layout_of(dest_ty
)?
;
990 match layout
.variants
{
991 layout
::Variants
::Single { index }
=> {
992 if index
!= variant_index
{
993 // If the layout of an enum is `Single`, all
994 // other variants are necessarily uninhabited.
995 assert_eq
!(layout
.for_variant(&self, variant_index
).abi
,
996 layout
::Abi
::Uninhabited
);
999 layout
::Variants
::Tagged { ref tag, .. }
=> {
1000 let discr_val
= dest_ty
.ty_adt_def().unwrap()
1001 .discriminant_for_variant(*self.tcx
, variant_index
)
1004 // raw discriminants for enums are isize or bigger during
1005 // their computation, but the in-memory tag is the smallest possible
1007 let size
= tag
.value
.size(self.tcx
.tcx
).bits();
1008 let shift
= 128 - size
;
1009 let discr_val
= (discr_val
<< shift
) >> shift
;
1011 let (discr_dest
, tag
) = self.place_field(dest
, mir
::Field
::new(0), layout
)?
;
1012 self.write_scalar(discr_dest
, Scalar
::Bits
{
1014 defined
: size
as u8,
1017 layout
::Variants
::NicheFilling
{
1023 if variant_index
!= dataful_variant
{
1024 let (niche_dest
, niche
) =
1025 self.place_field(dest
, mir
::Field
::new(0), layout
)?
;
1026 let niche_value
= ((variant_index
- niche_variants
.start()) as u128
)
1027 .wrapping_add(niche_start
);
1028 self.write_scalar(niche_dest
, Scalar
::Bits
{
1030 defined
: niche
.size
.bits() as u8,
1039 pub fn read_global_as_value(&mut self, gid
: GlobalId
<'tcx
>, ty
: Ty
<'tcx
>) -> EvalResult
<'tcx
, Value
> {
1040 if self.tcx
.is_static(gid
.instance
.def_id()).is_some() {
1045 .intern_static(gid
.instance
.def_id());
1046 let layout
= self.layout_of(ty
)?
;
1047 return Ok(Value
::ByRef(Scalar
::Ptr(alloc_id
.into()), layout
.align
))
1049 let cv
= self.const_eval(gid
)?
;
1050 self.const_to_value(&cv
.val
, ty
)
1053 pub fn const_eval(&self, gid
: GlobalId
<'tcx
>) -> EvalResult
<'tcx
, &'tcx ty
::Const
<'tcx
>> {
1054 let param_env
= if self.tcx
.is_static(gid
.instance
.def_id()).is_some() {
1055 ty
::ParamEnv
::reveal_all()
1059 self.tcx
.const_eval(param_env
.and(gid
)).map_err(|err
| EvalErrorKind
::ReferencedConstant(err
).into())
1062 pub fn force_allocation(&mut self, place
: Place
) -> EvalResult
<'tcx
, Place
> {
1063 let new_place
= match place
{
1064 Place
::Local { frame, local }
=> {
1065 match self.stack
[frame
].locals
[local
] {
1066 None
=> return err
!(DeadLocal
),
1067 Some(Value
::ByRef(ptr
, align
)) => {
1071 extra
: PlaceExtra
::None
,
1075 let ty
= self.stack
[frame
].mir
.local_decls
[local
].ty
;
1076 let ty
= self.monomorphize(ty
, self.stack
[frame
].instance
.substs
);
1077 let layout
= self.layout_of(ty
)?
;
1078 let ptr
= self.alloc_ptr(ty
)?
;
1079 self.stack
[frame
].locals
[local
] =
1080 Some(Value
::ByRef(ptr
.into(), layout
.align
)); // it stays live
1081 let place
= Place
::from_ptr(ptr
, layout
.align
);
1082 self.write_value(ValTy { value: val, ty }
, place
)?
;
1087 Place
::Ptr { .. }
=> place
,
1092 /// ensures this Value is not a ByRef
1093 pub fn follow_by_ref_value(
1097 ) -> EvalResult
<'tcx
, Value
> {
1099 Value
::ByRef(ptr
, align
) => {
1100 self.read_value(ptr
, align
, ty
)
1106 pub fn value_to_scalar(
1108 ValTy { value, ty }
: ValTy
<'tcx
>,
1109 ) -> EvalResult
<'tcx
, Scalar
> {
1110 match self.follow_by_ref_value(value
, ty
)?
{
1111 Value
::ByRef { .. }
=> bug
!("follow_by_ref_value can't result in `ByRef`"),
1113 Value
::Scalar(scalar
) => {
1114 // TODO: Do we really want insta-UB here?
1115 self.ensure_valid_value(scalar
, ty
)?
;
1119 Value
::ScalarPair(..) => bug
!("value_to_scalar can't work with fat pointers"),
1123 pub fn write_ptr(&mut self, dest
: Place
, val
: Scalar
, dest_ty
: Ty
<'tcx
>) -> EvalResult
<'tcx
> {
1125 value
: val
.to_value(),
1128 self.write_value(valty
, dest
)
1131 pub fn write_scalar(
1136 ) -> EvalResult
<'tcx
> {
1138 value
: Value
::Scalar(val
),
1141 self.write_value(valty
, dest
)
1146 ValTy { value: src_val, ty: dest_ty }
: ValTy
<'tcx
>,
1148 ) -> EvalResult
<'tcx
> {
1149 //trace!("Writing {:?} to {:?} at type {:?}", src_val, dest, dest_ty);
1150 // Note that it is really important that the type here is the right one, and matches the type things are read at.
1151 // In case `src_val` is a `ScalarPair`, we don't do any magic here to handle padding properly, which is only
1152 // correct if we never look at this data with the wrong type.
1155 Place
::Ptr { ptr, align, extra }
=> {
1156 assert_eq
!(extra
, PlaceExtra
::None
);
1157 self.write_value_to_ptr(src_val
, ptr
, align
, dest_ty
)
1160 Place
::Local { frame, local }
=> {
1161 let dest
= self.stack
[frame
].get_local(local
)?
;
1162 self.write_value_possibly_by_val(
1164 |this
, val
| this
.stack
[frame
].set_local(local
, val
),
// The cases here can be a bit subtle. Read carefully!
//
// NOTE(review): the source extraction dropped several lines of this function
// (parts of the signature and the `else` scaffolding). The missing structural
// tokens were restored from the visible uses in the body — verify against VCS.
/// Writes `src_val` into a destination slot whose previous contents were
/// `old_dest_val`, using the `write_dest` callback to store any non-`ByRef`
/// result back into the slot (e.g. a local variable).
fn write_value_possibly_by_val<F: FnOnce(&mut Self, Value) -> EvalResult<'tcx>>(
    &mut self,
    src_val: Value,
    write_dest: F,
    old_dest_val: Value,
    dest_ty: Ty<'tcx>,
) -> EvalResult<'tcx> {
    if let Value::ByRef(dest_ptr, align) = old_dest_val {
        // If the value is already `ByRef` (that is, backed by an `Allocation`),
        // then we must write the new value into this allocation, because there may be
        // other pointers into the allocation. These other pointers are logically
        // pointers into the local variable, and must be able to observe the change.
        //
        // Thus, it would be an error to replace the `ByRef` with a `ByVal`, unless we
        // knew for certain that there were no outstanding pointers to this allocation.
        self.write_value_to_ptr(src_val, dest_ptr, align, dest_ty)?;
    } else if let Value::ByRef(src_ptr, align) = src_val {
        // If the value is not `ByRef`, then we know there are no pointers to it
        // and we can simply overwrite the `Value` in the locals array directly.
        //
        // In this specific case, where the source value is `ByRef`, we must duplicate
        // the allocation, because this is a by-value operation. It would be incorrect
        // if they referred to the same allocation, since then a change to one would
        // implicitly change the other.
        //
        // It is a valid optimization to attempt reading a primitive value out of the
        // source and write that into the destination without making an allocation, so
        // we try that first.
        if let Ok(Some(src_val)) = self.try_read_value(src_ptr, align, dest_ty) {
            write_dest(self, src_val)?;
        } else {
            // Reading a primitive failed: allocate fresh backing store and
            // byte-copy the source allocation into it.
            let dest_ptr = self.alloc_ptr(dest_ty)?.into();
            let layout = self.layout_of(dest_ty)?;
            self.memory.copy(src_ptr, align.min(layout.align), dest_ptr, layout.align, layout.size, false)?;
            write_dest(self, Value::ByRef(dest_ptr, layout.align))?;
        }
    } else {
        // Finally, we have the simple case where neither source nor destination are
        // `ByRef`. We may simply copy the source value over the destination.
        write_dest(self, src_val)?;
    }
    Ok(())
}
/// Writes `value` to the memory pointed to by `dest` (with alignment
/// `dest_align`), interpreting the destination at type `dest_ty`.
///
/// NOTE(review): the extraction dropped the parameter list and the `match`
/// opener; both were reconstructed from the uses in the body — verify
/// against VCS.
pub fn write_value_to_ptr(
    &mut self,
    value: Value,
    dest: Scalar,
    dest_align: Align,
    dest_ty: Ty<'tcx>,
) -> EvalResult<'tcx> {
    let layout = self.layout_of(dest_ty)?;
    trace!("write_value_to_ptr: {:#?}, {}, {:#?}", value, dest_ty, layout);
    match value {
        // Source is itself in memory: plain byte copy, clamping the source
        // alignment claim to what the layout actually requires.
        Value::ByRef(ptr, align) => {
            self.memory.copy(ptr, align.min(layout.align), dest, dest_align.min(layout.align), layout.size, false)
        }
        Value::Scalar(scalar) => {
            // Determine signedness from the layout so sign extension is done
            // correctly when the scalar is stored.
            let signed = match layout.abi {
                layout::Abi::Scalar(ref scal) => match scal.value {
                    layout::Primitive::Int(_, signed) => signed,
                    _ => false,
                },
                // A fully-undefined scalar may be written regardless of layout.
                _ => match scalar {
                    Scalar::Bits { defined: 0, .. } => false,
                    _ => bug!("write_value_to_ptr: invalid ByVal layout: {:#?}", layout),
                },
            };
            self.memory.write_scalar(dest, dest_align, scalar, layout.size, signed)
        }
        Value::ScalarPair(a_val, b_val) => {
            trace!("write_value_to_ptr valpair: {:#?}", layout);
            let (a, b) = match layout.abi {
                layout::Abi::ScalarPair(ref a, ref b) => (&a.value, &b.value),
                _ => bug!("write_value_to_ptr: invalid ScalarPair layout: {:#?}", layout),
            };
            let (a_size, b_size) = (a.size(&self), b.size(&self));
            // The first component sits at offset 0; the second is placed after
            // it, aligned to its own alignment.
            let a_ptr = dest;
            let b_offset = a_size.abi_align(b.align(&self));
            let b_ptr = dest.ptr_offset(b_offset, &self)?.into();
            // TODO: What about signedess?
            self.memory.write_scalar(a_ptr, dest_align, a_val, a_size, false)?;
            self.memory.write_scalar(b_ptr, dest_align, b_val, b_size, false)
        }
    }
}
/// Checks that `val` is a valid bit pattern for type `ty`.
/// Only `bool` and `char` are checked here; other types pass unchecked.
///
/// NOTE(review): the extraction dropped the `match` opener and the
/// fall-through arm; reconstructed — verify against VCS.
fn ensure_valid_value(&self, val: Scalar, ty: Ty<'tcx>) -> EvalResult<'tcx> {
    match ty.sty {
        // `to_bool` itself errors on anything but 0/1; discard the value.
        ty::TyBool => val.to_bool().map(|_| ()),

        // A char must be a valid Unicode scalar value.
        ty::TyChar if ::std::char::from_u32(val.to_bits(Size::from_bytes(4))? as u32).is_none() => {
            err!(InvalidChar(val.to_bits(Size::from_bytes(4))? as u32 as u128))
        }

        _ => Ok(()),
    }
}
/// Reads a primitive value of type `ty` from `ptr`. Unlike
/// `try_read_value`, failure to produce a `Value` here is a bug
/// (callers must only use this for types known to be readable).
///
/// NOTE(review): the extraction dropped the success branch; reconstructed
/// as `Ok(val)` — verify against VCS.
pub fn read_value(&self, ptr: Scalar, align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
    if let Some(val) = self.try_read_value(ptr, align, ty)? {
        Ok(val)
    } else {
        bug!("primitive read failed for type: {:?}", ty);
    }
}
/// Reads a (possibly fat) pointer to a value of type `pointee_ty` from
/// memory at `ptr`. For unsized pointees the extra word (length or vtable)
/// is read from the slot immediately following the data pointer.
///
/// NOTE(review): the extraction dropped the `&self`/`ptr`/`ptr_align`
/// parameters and the thin-pointer branch; reconstructed from the uses in
/// the body — verify against VCS.
pub(crate) fn read_ptr(
    &self,
    ptr: Pointer,
    ptr_align: Align,
    pointee_ty: Ty<'tcx>,
) -> EvalResult<'tcx, Value> {
    let ptr_size = self.memory.pointer_size();
    // First word: the data pointer itself.
    let p: Scalar = self.memory.read_ptr_sized(ptr, ptr_align)?.into();
    if self.type_is_sized(pointee_ty) {
        // Thin pointer: nothing more to read.
        Ok(p.to_value())
    } else {
        trace!("reading fat pointer extra of type {}", pointee_ty);
        // Second word: length (slices/str) or vtable (trait objects).
        let extra = ptr.offset(ptr_size, self)?;
        match self.tcx.struct_tail(pointee_ty).sty {
            ty::TyDynamic(..) => Ok(p.to_value_with_vtable(
                self.memory.read_ptr_sized(extra, ptr_align)?.to_ptr()?,
            )),
            ty::TySlice(..) | ty::TyStr => {
                let len = self.memory
                    .read_ptr_sized(extra, ptr_align)?
                    .to_bits(ptr_size)?;
                Ok(p.to_value_with_len(len as u64, self.tcx.tcx))
            }
            _ => bug!("unsized scalar ptr read from {:?}", pointee_ty),
        }
    }
}
/// Performs lightweight validation of the memory at `ptr` as a value of
/// type `ty`: bools, chars, references/raw pointers, boxes, and scalar
/// ADTs are spot-checked by reading them back.
///
/// NOTE(review): the extraction dropped the parameter list, the `match`
/// opener, and most arm headers; the surrounding structure was
/// reconstructed from the visible arm bodies — verify against VCS before
/// relying on the exact arm set.
pub fn validate_ptr_target(
    &self,
    ptr: Pointer,
    ptr_align: Align,
    ty: Ty<'tcx>,
) -> EvalResult<'tcx> {
    match ty.sty {
        ty::TyBool => {
            // Reading as bool errors unless the byte is 0 or 1.
            self.memory.read_scalar(ptr, ptr_align, Size::from_bytes(1))?.to_bool()?;
        }
        ty::TyChar => {
            // A char must be a valid Unicode scalar value.
            let c = self.memory.read_scalar(ptr, ptr_align, Size::from_bytes(4))?.to_bits(Size::from_bytes(4))? as u32;
            match ::std::char::from_u32(c) {
                Some(..) => (),
                None => return err!(InvalidChar(c as u128)),
            }
        }
        ty::TyFnPtr(_) => {
            self.memory.read_ptr_sized(ptr, ptr_align)?;
        }
        ty::TyRef(_, rty, _) |
        ty::TyRawPtr(ty::TypeAndMut { ty: rty, .. }) => {
            // Reading back validates the (possibly fat) pointer shape.
            self.read_ptr(ptr, ptr_align, rty)?;
        }
        ty::TyAdt(def, _) => {
            if def.is_box() {
                self.read_ptr(ptr, ptr_align, ty.boxed_ty())?;
            } else if let layout::Abi::Scalar(ref scalar) = self.layout_of(ty)?.abi {
                // Scalar ADTs (e.g. single-field newtypes): read the scalar.
                let size = scalar.value.size(self);
                self.memory.read_scalar(ptr, ptr_align, size)?;
            }
        }
        _ => (),
    }
    Ok(())
}
/// If `val` is a `ByRef`, attempts to lift it to a `Scalar`/`ScalarPair`
/// by reading through the pointer; otherwise returns it unchanged.
///
/// NOTE(review): the extraction dropped the tail of this function;
/// reconstructed as `val = read_val; ... Ok(val)` — verify against VCS.
pub fn try_read_by_ref(&self, mut val: Value, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
    // Convert to ByVal or ScalarPair if possible
    if let Value::ByRef(ptr, align) = val {
        if let Some(read_val) = self.try_read_value(ptr, align, ty)? {
            val = read_val;
        }
    }
    Ok(val)
}
/// Attempts to read a value of type `ty` from memory at `ptr`.
/// Returns `Ok(None)` when the type's layout is not a plain scalar or
/// scalar pair (i.e. it cannot be represented as an immediate `Value`).
///
/// NOTE(review): the extraction dropped the `match` opener and the
/// fall-through arm; reconstructed — verify against VCS.
pub fn try_read_value(&self, ptr: Scalar, ptr_align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Option<Value>> {
    let layout = self.layout_of(ty)?;
    self.memory.check_align(ptr, ptr_align)?;

    // ZSTs carry no data; any (aligned) pointer "reads" an undef scalar.
    if layout.size.bytes() == 0 {
        return Ok(Some(Value::Scalar(Scalar::undef())));
    }

    let ptr = ptr.to_ptr()?;

    // Not the right place to do this
    //self.validate_ptr_target(ptr, ptr_align, ty)?;

    match layout.abi {
        layout::Abi::Scalar(..) => {
            let scalar = self.memory.read_scalar(ptr, ptr_align, layout.size)?;
            Ok(Some(Value::Scalar(scalar)))
        }
        layout::Abi::ScalarPair(ref a, ref b) => {
            let (a, b) = (&a.value, &b.value);
            let (a_size, b_size) = (a.size(self), b.size(self));
            // First component at offset 0, second after it at its own alignment.
            let a_ptr = ptr;
            let b_offset = a_size.abi_align(b.align(self));
            let b_ptr = ptr.offset(b_offset, self)?.into();
            let a_val = self.memory.read_scalar(a_ptr, ptr_align, a_size)?;
            let b_val = self.memory.read_scalar(b_ptr, ptr_align, b_size)?;
            Ok(Some(Value::ScalarPair(a_val, b_val)))
        }
        // Aggregates etc. cannot be read as an immediate value.
        _ => Ok(None),
    }
}
1394 pub fn frame(&self) -> &Frame
<'mir
, 'tcx
> {
1395 self.stack
.last().expect("no call frames exist")
1398 pub fn frame_mut(&mut self) -> &mut Frame
<'mir
, 'tcx
> {
1399 self.stack
.last_mut().expect("no call frames exist")
/// Returns the MIR body of the function executing in the current frame.
///
/// NOTE(review): the extraction dropped this function's body; presumably
/// it is `self.frame().mir` (the frame stores `&'mir mir::Mir<'tcx>`) —
/// verify against VCS.
pub(super) fn mir(&self) -> &'mir mir::Mir<'tcx> {
    self.frame().mir
}
/// Returns the substitutions of the function in the current frame, or the
/// empty substitutions when no frame exists.
///
/// NOTE(review): the extraction dropped the `else` branch; reconstructed
/// as `Substs::empty()` — verify against VCS.
pub fn substs(&self) -> &'tcx Substs<'tcx> {
    if let Some(frame) = self.stack.last() {
        frame.instance.substs
    } else {
        Substs::empty()
    }
}
/// Performs an unsizing coercion on a pointer value: `src` (a pointer with
/// pointee `sty`) is written to `dest` as a pointer with pointee `dty`,
/// growing thin pointers into fat pointers (adding a length or vtable) as
/// required.
///
/// NOTE(review): the extraction dropped the signature head and the `ValTy`
/// construction lines; reconstructed from the visible body — verify the
/// parameter list and field initializers against VCS.
fn unsize_into_ptr(
    &mut self,
    src: Value,
    src_ty: Ty<'tcx>,
    dest: Place,
    dest_ty: Ty<'tcx>,
    sty: Ty<'tcx>,
    dty: Ty<'tcx>,
) -> EvalResult<'tcx> {
    // A<Struct> -> A<Trait> conversion
    let (src_pointee_ty, dest_pointee_ty) = self.tcx.struct_lockstep_tails(sty, dty);

    match (&src_pointee_ty.sty, &dest_pointee_ty.sty) {
        // &[T; n] -> &[T]: attach the statically-known length.
        (&ty::TyArray(_, length), &ty::TySlice(_)) => {
            let ptr = self.into_ptr(src)?;
            // u64 cast is from usize to u64, which is always good
            let valty = ValTy {
                value: ptr.to_value_with_len(length.unwrap_usize(self.tcx.tcx), self.tcx.tcx),
                ty: dest_ty,
            };
            self.write_value(valty, dest)
        }
        (&ty::TyDynamic(..), &ty::TyDynamic(..)) => {
            // For now, upcasts are limited to changes in marker
            // traits, and hence never actually require an actual
            // change to the vtable.
            let valty = ValTy {
                value: src,
                ty: dest_ty,
            };
            self.write_value(valty, dest)
        }
        // Concrete type -> trait object: attach a freshly-built vtable.
        (_, &ty::TyDynamic(ref data, _)) => {
            let trait_ref = data.principal().unwrap().with_self_ty(
                *self.tcx,
                src_pointee_ty,
            );
            let trait_ref = self.tcx.erase_regions(&trait_ref);
            let vtable = self.get_vtable(src_pointee_ty, trait_ref)?;
            let ptr = self.into_ptr(src)?;
            let valty = ValTy {
                value: ptr.to_value_with_vtable(vtable),
                ty: dest_ty,
            };
            self.write_value(valty, dest)
        }
        _ => bug!("invalid unsizing {:?} -> {:?}", src_ty, dest_ty),
    }
}
/// Performs an unsizing coercion from `src` (at `src_layout`) into the
/// place `dst` (at `dst_layout`). Pointer-like types delegate to
/// `unsize_into_ptr`; for ADTs (e.g. `Arc<T> -> Arc<Trait>`) each field is
/// coerced individually.
///
/// NOTE(review): the extraction dropped the signature head, the `continue`
/// for ZST fields, the immediate-source tuple, and the if/else around the
/// per-field write; those were reconstructed from the visible body —
/// verify against VCS.
fn unsize_into(
    &mut self,
    src: Value,
    src_layout: TyLayout<'tcx>,
    dst: Place,
    dst_layout: TyLayout<'tcx>,
) -> EvalResult<'tcx> {
    match (&src_layout.ty.sty, &dst_layout.ty.sty) {
        (&ty::TyRef(_, s, _), &ty::TyRef(_, d, _)) |
        (&ty::TyRef(_, s, _), &ty::TyRawPtr(TypeAndMut { ty: d, .. })) |
        (&ty::TyRawPtr(TypeAndMut { ty: s, .. }),
         &ty::TyRawPtr(TypeAndMut { ty: d, .. })) => {
            self.unsize_into_ptr(src, src_layout.ty, dst, dst_layout.ty, s, d)
        }
        (&ty::TyAdt(def_a, _), &ty::TyAdt(def_b, _)) => {
            assert_eq!(def_a, def_b);
            if def_a.is_box() || def_b.is_box() {
                // Unsizing between a box and a non-box is not a thing.
                if !def_a.is_box() || !def_b.is_box() {
                    bug!("invalid unsizing between {:?} -> {:?}", src_layout, dst_layout);
                }
                return self.unsize_into_ptr(
                    src,
                    src_layout.ty,
                    dst,
                    dst_layout.ty,
                    src_layout.ty.boxed_ty(),
                    dst_layout.ty.boxed_ty(),
                );
            }

            // unsizing of generic struct with pointer fields
            // Example: `Arc<T>` -> `Arc<Trait>`
            // here we need to increase the size of every &T thin ptr field to a fat ptr
            for i in 0..src_layout.fields.count() {
                let (dst_f_place, dst_field) =
                    self.place_field(dst, mir::Field::new(i), dst_layout)?;
                if dst_field.is_zst() {
                    continue;
                }
                let (src_f_value, src_field) = match src {
                    Value::ByRef(ptr, align) => {
                        let src_place = Place::from_scalar_ptr(ptr, align);
                        let (src_f_place, src_field) =
                            self.place_field(src_place, mir::Field::new(i), src_layout)?;
                        (self.read_place(src_f_place)?, src_field)
                    }
                    Value::Scalar(_) | Value::ScalarPair(..) => {
                        // An immediate can only stand for a single field at
                        // offset 0 that covers the whole value.
                        let src_field = src_layout.field(&self, i)?;
                        assert_eq!(src_layout.fields.offset(i).bytes(), 0);
                        assert_eq!(src_field.size, src_layout.size);
                        (src, src_field)
                    }
                };
                if src_field.ty == dst_field.ty {
                    // Field type unchanged: plain copy.
                    self.write_value(ValTy {
                        value: src_f_value,
                        ty: src_field.ty,
                    }, dst_f_place)?;
                } else {
                    // Field type changed: recurse to coerce it.
                    self.unsize_into(src_f_value, src_field, dst_f_place, dst_field)?;
                }
            }
            Ok(())
        }
        _ => {
            bug!(
                "unsize_into: invalid conversion: {:?} -> {:?}",
                src_layout,
                dst_layout
            )
        }
    }
}
/// Debugging aid: logs the contents of `place` (and dumps any allocations
/// it points to) at trace level. Does nothing when trace logging is off.
///
/// NOTE(review): the extraction dropped the early `return`, the `match`
/// openers, the `Err`/`else` scaffolding, and the final `trace!` of `msg`;
/// those were reconstructed around the visible arms — verify against VCS.
pub fn dump_local(&self, place: Place) {
    // Skip all the string building when tracing is disabled.
    if !log_enabled!(::log::Level::Trace) {
        return;
    }
    match place {
        Place::Local { frame, local } => {
            let mut allocs = Vec::new();
            let mut msg = format!("{:?}", local);
            if frame != self.cur_frame() {
                write!(msg, " ({} frames up)", self.cur_frame() - frame).unwrap();
            }
            write!(msg, ":").unwrap();

            match self.stack[frame].get_local(local) {
                Err(err) => {
                    // A dead local is expected and printable; anything else
                    // is an interpreter bug.
                    if let EvalErrorKind::DeadLocal = err.kind {
                        write!(msg, " is dead").unwrap();
                    } else {
                        panic!("Failed to access local: {:?}", err);
                    }
                }
                Ok(Value::ByRef(ptr, align)) => {
                    match ptr {
                        Scalar::Ptr(ptr) => {
                            write!(msg, " by align({}) ref:", align.abi()).unwrap();
                            allocs.push(ptr.alloc_id);
                        }
                        ptr => write!(msg, " integral by ref: {:?}", ptr).unwrap(),
                    }
                }
                Ok(Value::Scalar(val)) => {
                    write!(msg, " {:?}", val).unwrap();
                    if let Scalar::Ptr(ptr) = val {
                        allocs.push(ptr.alloc_id);
                    }
                }
                Ok(Value::ScalarPair(val1, val2)) => {
                    write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
                    if let Scalar::Ptr(ptr) = val1 {
                        allocs.push(ptr.alloc_id);
                    }
                    if let Scalar::Ptr(ptr) = val2 {
                        allocs.push(ptr.alloc_id);
                    }
                }
            }

            trace!("{}", msg);
            self.memory.dump_allocs(allocs);
        }
        Place::Ptr { ptr, align, .. } => {
            match ptr {
                Scalar::Ptr(ptr) => {
                    trace!("by align({}) ref:", align.abi());
                    self.memory.dump_alloc(ptr.alloc_id);
                }
                ptr => trace!(" integral by ref: {:?}", ptr),
            }
        }
    }
}
/// Convenience function to ensure correct usage of locals: reads the
/// current value of `local` in `frame`, applies `f`, and stores the
/// result back, so a local is never left half-updated.
///
/// NOTE(review): the extraction dropped the `where` keyword and the final
/// `Ok(())`; reconstructed — verify against VCS.
pub fn modify_local<F>(&mut self, frame: usize, local: mir::Local, f: F) -> EvalResult<'tcx>
where
    F: FnOnce(&mut Self, Value) -> EvalResult<'tcx, Value>,
{
    let val = self.stack[frame].get_local(local)?;
    let new_val = f(self, val)?;
    self.stack[frame].set_local(local, new_val)?;
    // FIXME(solson): Run this when setting to Undef? (See previous version of this code.)
    // if let Value::ByRef(ptr) = self.stack[frame].get_local(local) {
    //     self.memory.deallocate(ptr)?;
    // }
    Ok(())
}
/// Walks the call stack (innermost first, skipping the outermost constant
/// environment frame) and produces one `FrameInfo` per distinct span,
/// plus the top-level span for error reporting.
///
/// NOTE(review): the extraction dropped the `continue` statements and the
/// inner span-comparison of the duplicate-suppression logic; that logic
/// was reconstructed and should be verified against VCS.
pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> (Vec<FrameInfo>, Span) {
    let mut last_span = None;
    let mut frames = Vec::new();
    // skip 1 because the last frame is just the environment of the constant
    for &Frame { instance, span, mir, block, stmt, .. } in self.stack().iter().skip(1).rev() {
        // make sure we don't emit frames that are duplicates of the previous
        if explicit_span == Some(span) {
            last_span = Some(span);
            continue;
        }
        if let Some(last) = last_span {
            if last == span {
                continue;
            }
        } else {
            last_span = Some(span);
        }
        // Closures have no useful path; label them explicitly.
        let location = if self.tcx.def_key(instance.def_id()).disambiguated_data.data == DefPathData::ClosureExpr {
            "closure".to_owned()
        } else {
            instance.to_string()
        };
        // `stmt` past the statement list means we are at the terminator.
        let block = &mir.basic_blocks()[block];
        let source_info = if stmt < block.statements.len() {
            block.statements[stmt].source_info
        } else {
            block.terminator().source_info
        };
        // Lint roots are only available for local (non-cross-crate) MIR.
        let lint_root = match mir.source_scope_local_data {
            mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root),
            mir::ClearCrossCrate::Clear => None,
        };
        frames.push(FrameInfo { span, location, lint_root });
    }
    trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);
    (frames, self.tcx.span)
}
1655 pub fn sign_extend(&self, value
: u128
, ty
: Ty
<'tcx
>) -> EvalResult
<'tcx
, u128
> {
1656 super::sign_extend(self.tcx
.tcx
, value
, ty
)
1659 pub fn truncate(&self, value
: u128
, ty
: Ty
<'tcx
>) -> EvalResult
<'tcx
, u128
> {
1660 super::truncate(self.tcx
.tcx
, value
, ty
)
impl<'mir, 'tcx> Frame<'mir, 'tcx> {
    /// Reads the current value of `local`; errors with `DeadLocal` when
    /// the local's storage is dead.
    pub fn get_local(&self, local: mir::Local) -> EvalResult<'tcx, Value> {
        self.locals[local].ok_or_else(|| EvalErrorKind::DeadLocal.into())
    }

    /// Overwrites `local` with `value`; writing to a dead local is an error.
    ///
    /// NOTE(review): the extraction dropped the body of the `Some` arm;
    /// presumably `*local = value; Ok(())` — verify against VCS.
    fn set_local(&mut self, local: mir::Local, value: Value) -> EvalResult<'tcx> {
        match self.locals[local] {
            None => err!(DeadLocal),
            Some(ref mut local) => {
                *local = value;
                Ok(())
            }
        }
    }

    /// Marks `local` as live and returns its previous value (if any).
    pub fn storage_live(&mut self, local: mir::Local) -> Option<Value> {
        trace!("{:?} is now live", local);

        // StorageLive *always* kills the value that's currently stored
        mem::replace(&mut self.locals[local], Some(Value::Scalar(Scalar::undef())))
    }

    /// Returns the old value of the local
    pub fn storage_dead(&mut self, local: mir::Local) -> Option<Value> {
        trace!("{:?} is now dead", local);

        self.locals[local].take()
    }
}