// rustc 1.28.0~beta.14 (upstream): src/librustc_mir/interpret/eval_context.rs
1 use std::fmt::Write;
2
3 use rustc::hir::def_id::DefId;
4 use rustc::hir::def::Def;
5 use rustc::hir::map::definitions::DefPathData;
6 use rustc::middle::const_val::ConstVal;
7 use rustc::mir;
8 use rustc::ty::layout::{self, Size, Align, HasDataLayout, IntegerExt, LayoutOf, TyLayout};
9 use rustc::ty::subst::{Subst, Substs};
10 use rustc::ty::{self, Ty, TyCtxt, TypeAndMut};
11 use rustc::ty::query::TyCtxtAt;
12 use rustc_data_structures::indexed_vec::{IndexVec, Idx};
13 use rustc::middle::const_val::FrameInfo;
14 use syntax::codemap::{self, Span};
15 use syntax::ast::Mutability;
16 use rustc::mir::interpret::{
17 GlobalId, Value, Scalar,
18 EvalResult, EvalErrorKind, Pointer, ConstValue,
19 };
20 use std::mem;
21
22 use super::{Place, PlaceExtra, Memory,
23 HasMemory, MemoryKind,
24 Machine};
25
26 pub struct EvalContext<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
27 /// Stores the `Machine` instance.
28 pub machine: M,
29
30 /// The results of the type checker, from rustc.
31 pub tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
32
33 /// Bounds in scope for polymorphic evaluations.
34 pub param_env: ty::ParamEnv<'tcx>,
35
36 /// The virtual memory system.
37 pub memory: Memory<'a, 'mir, 'tcx, M>,
38
39 /// The virtual call stack.
40 pub(crate) stack: Vec<Frame<'mir, 'tcx>>,
41
42 /// The maximum number of stack frames allowed
43 pub(crate) stack_limit: usize,
44
45 /// The maximum number of terminators that may be evaluated.
46 /// This prevents infinite loops and huge computations from freezing up const eval.
47 /// Remove once halting problem is solved.
48 pub(crate) terminators_remaining: usize,
49 }
50
51 /// A stack frame.
52 pub struct Frame<'mir, 'tcx: 'mir> {
53 ////////////////////////////////////////////////////////////////////////////////
54 // Function and callsite information
55 ////////////////////////////////////////////////////////////////////////////////
56 /// The MIR for the function called on this frame.
57 pub mir: &'mir mir::Mir<'tcx>,
58
59 /// The def_id and substs of the current function
60 pub instance: ty::Instance<'tcx>,
61
62 /// The span of the call site.
63 pub span: codemap::Span,
64
65 ////////////////////////////////////////////////////////////////////////////////
66 // Return place and locals
67 ////////////////////////////////////////////////////////////////////////////////
68 /// The block to return to when returning from the current stack frame
69 pub return_to_block: StackPopCleanup,
70
71 /// The location where the result of the current stack frame should be written to.
72 pub return_place: Place,
73
74 /// The list of locals for this stack frame, stored in order as
75 /// `[return_ptr, arguments..., variables..., temporaries...]`. The locals are stored as `Option<Value>`s.
76 /// `None` represents a local that is currently dead, while a live local
77 /// can either directly contain `Scalar` or refer to some part of an `Allocation`.
78 ///
79 /// Before being initialized, arguments are `Value::Scalar(Scalar::undef())` and other locals are `None`.
80 pub locals: IndexVec<mir::Local, Option<Value>>,
81
82 ////////////////////////////////////////////////////////////////////////////////
83 // Current position within the function
84 ////////////////////////////////////////////////////////////////////////////////
85 /// The block that is currently executed (or will be executed after the above call stacks
86 /// return).
87 pub block: mir::BasicBlock,
88
89 /// The index of the currently evaluated statement.
90 pub stmt: usize,
91 }
92
93 #[derive(Clone, Debug, Eq, PartialEq, Hash)]
94 pub enum StackPopCleanup {
95 /// The stack frame existed to compute the initial value of a static/constant; make sure it
96 /// isn't modifiable afterwards in case of constants.
97 /// In case of `static mut`, mark the memory to ensure it's never marked as immutable through
98 /// references or deallocated
99 MarkStatic(Mutability),
100 /// A regular stack frame added due to a function call will need to get forwarded to the next
101 /// block
102 Goto(mir::BasicBlock),
103 /// The main function and diverging functions have nowhere to return to
104 None,
105 }
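// Illustrative mapping, not part of the original source: the root frame of a
// `static`/`const` evaluation is pushed with `StackPopCleanup::MarkStatic(..)`, an
// ordinary function call uses `StackPopCleanup::Goto(return_block)`, and the
// top-level entry frame (e.g. `main` in miri) uses `StackPopCleanup::None`, roughly:
//
//     ecx.push_stack_frame(instance, span, mir, return_place, StackPopCleanup::Goto(bb))?;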
106
107 #[derive(Copy, Clone, Debug)]
108 pub struct TyAndPacked<'tcx> {
109 pub ty: Ty<'tcx>,
110 pub packed: bool,
111 }
112
113 #[derive(Copy, Clone, Debug)]
114 pub struct ValTy<'tcx> {
115 pub value: Value,
116 pub ty: Ty<'tcx>,
117 }
118
119 impl<'tcx> ::std::ops::Deref for ValTy<'tcx> {
120 type Target = Value;
121 fn deref(&self) -> &Value {
122 &self.value
123 }
124 }
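// A small convenience, noted here for clarity: because of this `Deref` impl a
// `&ValTy` coerces to `&Value`, so helpers that take a `&Value` (and `Value` methods)
// can be used on a `ValTy` directly without writing `.value`.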
125
126 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for &'a EvalContext<'a, 'mir, 'tcx, M> {
127 #[inline]
128 fn data_layout(&self) -> &layout::TargetDataLayout {
129 &self.tcx.data_layout
130 }
131 }
132
133 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout
134 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
135 #[inline]
136 fn data_layout(&self) -> &layout::TargetDataLayout {
137 &self.tcx.data_layout
138 }
139 }
140
141 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx> for &'a EvalContext<'a, 'mir, 'tcx, M> {
142 #[inline]
143 fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
144 *self.tcx
145 }
146 }
147
148 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx>
149 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
150 #[inline]
151 fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
152 *self.tcx
153 }
154 }
155
156 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf for &'a EvalContext<'a, 'mir, 'tcx, M> {
157 type Ty = Ty<'tcx>;
158 type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
159
160 fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
161 self.tcx.layout_of(self.param_env.and(ty))
162 .map_err(|layout| EvalErrorKind::Layout(layout).into())
163 }
164 }
165
166 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf
167 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
168 type Ty = Ty<'tcx>;
169 type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
170
171 #[inline]
172 fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
173 (&**self).layout_of(ty)
174 }
175 }
176
177 const MAX_TERMINATORS: usize = 1_000_000;
178
179 impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
180 pub fn new(
181 tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
182 param_env: ty::ParamEnv<'tcx>,
183 machine: M,
184 memory_data: M::MemoryData,
185 ) -> Self {
186 EvalContext {
187 machine,
188 tcx,
189 param_env,
190 memory: Memory::new(tcx, memory_data),
191 stack: Vec::new(),
192 stack_limit: tcx.sess.const_eval_stack_frame_limit,
193 terminators_remaining: MAX_TERMINATORS,
194 }
195 }
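// Illustrative usage, a sketch rather than code from this file: a caller such as the
// const evaluator builds a context and then pushes the initial frame, e.g.
//
//     let mut ecx = EvalContext::new(tcx.at(span), param_env, machine, memory_data);
//     ecx.push_stack_frame(instance, span, mir, return_place, StackPopCleanup::None)?;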
196
197 pub(crate) fn with_fresh_body<F: FnOnce(&mut Self) -> R, R>(&mut self, f: F) -> R {
198 let stack = mem::replace(&mut self.stack, Vec::new());
199 let terminators_remaining = mem::replace(&mut self.terminators_remaining, MAX_TERMINATORS);
200 let r = f(self);
201 self.stack = stack;
202 self.terminators_remaining = terminators_remaining;
203 r
204 }
205
206 pub fn alloc_ptr(&mut self, ty: Ty<'tcx>) -> EvalResult<'tcx, Pointer> {
207 let layout = self.layout_of(ty)?;
208 assert!(!layout.is_unsized(), "cannot alloc memory for unsized type");
209
210 self.memory.allocate(layout.size, layout.align, Some(MemoryKind::Stack))
211 }
212
213 pub fn memory(&self) -> &Memory<'a, 'mir, 'tcx, M> {
214 &self.memory
215 }
216
217 pub fn memory_mut(&mut self) -> &mut Memory<'a, 'mir, 'tcx, M> {
218 &mut self.memory
219 }
220
221 pub fn stack(&self) -> &[Frame<'mir, 'tcx>] {
222 &self.stack
223 }
224
225 #[inline]
226 pub fn cur_frame(&self) -> usize {
227 assert!(self.stack.len() > 0);
228 self.stack.len() - 1
229 }
230
231 pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value> {
232 let ptr = self.memory.allocate_bytes(s.as_bytes());
233 Ok(Scalar::Ptr(ptr).to_value_with_len(s.len() as u64, self.tcx.tcx))
234 }
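// For example (illustrative): `str_to_value("hi")` allocates the two bytes "hi" and
// returns a fat pointer as `Value::ScalarPair(ptr, len)` with `len == 2`, via
// `to_value_with_len` above.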
235
236 pub fn const_value_to_value(
237 &mut self,
238 val: ConstValue<'tcx>,
239 _ty: Ty<'tcx>,
240 ) -> EvalResult<'tcx, Value> {
241 match val {
242 ConstValue::ByRef(alloc, offset) => {
243 // FIXME: Allocate new AllocId for all constants inside
244 let id = self.memory.allocate_value(alloc.clone(), Some(MemoryKind::Stack))?;
245 Ok(Value::ByRef(Pointer::new(id, offset).into(), alloc.align))
246 },
247 ConstValue::ScalarPair(a, b) => Ok(Value::ScalarPair(a, b)),
248 ConstValue::Scalar(val) => Ok(Value::Scalar(val)),
249 }
250 }
251
252 pub(super) fn const_to_value(
253 &mut self,
254 const_val: &ConstVal<'tcx>,
255 ty: Ty<'tcx>
256 ) -> EvalResult<'tcx, Value> {
257 match *const_val {
258 ConstVal::Unevaluated(def_id, substs) => {
259 let instance = self.resolve(def_id, substs)?;
260 self.read_global_as_value(GlobalId {
261 instance,
262 promoted: None,
263 }, ty)
264 }
265 ConstVal::Value(val) => self.const_value_to_value(val, ty)
266 }
267 }
268
269 pub(super) fn resolve(&self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> EvalResult<'tcx, ty::Instance<'tcx>> {
270 trace!("resolve: {:?}, {:#?}", def_id, substs);
271 trace!("substs: {:#?}", self.substs());
272 trace!("param_env: {:#?}", self.param_env);
273 let substs = self.tcx.subst_and_normalize_erasing_regions(
274 self.substs(),
275 self.param_env,
276 &substs,
277 );
278 ty::Instance::resolve(
279 *self.tcx,
280 self.param_env,
281 def_id,
282 substs,
283 ).ok_or_else(|| EvalErrorKind::TypeckError.into()) // turn error prop into a panic to expose associated type in const issue
284 }
285
286 pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
287 ty.is_sized(self.tcx, self.param_env)
288 }
289
290 pub fn load_mir(
291 &self,
292 instance: ty::InstanceDef<'tcx>,
293 ) -> EvalResult<'tcx, &'tcx mir::Mir<'tcx>> {
294 // do not continue if typeck errors occurred (can only occur in local crate)
295 let did = instance.def_id();
296 if did.is_local() && self.tcx.has_typeck_tables(did) && self.tcx.typeck_tables_of(did).tainted_by_errors {
297 return err!(TypeckError);
298 }
299 trace!("load mir {:?}", instance);
300 match instance {
301 ty::InstanceDef::Item(def_id) => {
302 self.tcx.maybe_optimized_mir(def_id).ok_or_else(||
303 EvalErrorKind::NoMirFor(self.tcx.item_path_str(def_id)).into()
304 )
305 }
306 _ => Ok(self.tcx.instance_mir(instance)),
307 }
308 }
309
310 pub fn monomorphize(&self, ty: Ty<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
311 // miri doesn't care about lifetimes, and will choke on some crazy ones
312 // let's simply get rid of them
313 let substituted = ty.subst(*self.tcx, substs);
314 self.tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), substituted)
315 }
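// For instance (illustrative): monomorphizing `&'a T` with substs `[u32]` substitutes
// `T := u32` and erases the lifetime, yielding `&u32`.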
316
317 /// Return the size and alignment of the value at the given type.
318 /// Note that the value does not matter if the type is sized. For unsized types,
319 /// the value has to be a fat pointer, and we only care about the "extra" data in it.
320 pub fn size_and_align_of_dst(
321 &mut self,
322 ty: Ty<'tcx>,
323 value: Value,
324 ) -> EvalResult<'tcx, (Size, Align)> {
325 let layout = self.layout_of(ty)?;
326 if !layout.is_unsized() {
327 Ok(layout.size_and_align())
328 } else {
329 match ty.sty {
330 ty::TyAdt(..) | ty::TyTuple(..) => {
331 // First get the size of all statically known fields.
332 // Don't use type_of::sizing_type_of because that expects the type to be sized,
333 // and it also rounds up to alignment, which we want to avoid,
334 // as the unsized field's alignment could be smaller.
335 assert!(!ty.is_simd());
336 debug!("DST {} layout: {:?}", ty, layout);
337
338 let sized_size = layout.fields.offset(layout.fields.count() - 1);
339 let sized_align = layout.align;
340 debug!(
341 "DST {} statically sized prefix size: {:?} align: {:?}",
342 ty,
343 sized_size,
344 sized_align
345 );
346
347 // Recurse to get the size of the dynamically sized field (must be
348 // the last field).
349 let field_ty = layout.field(&self, layout.fields.count() - 1)?.ty;
350 let (unsized_size, unsized_align) =
351 self.size_and_align_of_dst(field_ty, value)?;
352
353 // FIXME (#26403, #27023): We should be adding padding
354 // to `sized_size` (to accommodate the `unsized_align`
355 // required of the unsized field that follows) before
356 // summing it with `sized_size`. (Note that since #26403
357 // is unfixed, we do not yet add the necessary padding
358 // here. But this is where the add would go.)
359
360 // Return the sum of sizes and max of aligns.
361 let size = sized_size + unsized_size;
362
363 // Choose max of two known alignments (combined value must
364 // be aligned according to more restrictive of the two).
365 let align = sized_align.max(unsized_align);
366
367 // Issue #27023: must add any necessary padding to `size`
368 // (to make it a multiple of `align`) before returning it.
369 //
370 // Namely, the returned size should be, in C notation:
371 //
372 // `size + ((size & (align-1)) ? align : 0)`
373 //
374 // emulated via the semi-standard fast bit trick:
375 //
376 // `(size + (align-1)) & -align`
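//
// Worked example (illustrative): with `size = 5` and `align = 4`, `-align` is
// `!(align - 1)`, so `(5 + 3) & !3 = 8`; the size is rounded up to the next
// multiple of the alignment, which is what `abi_align` does below.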
377
378 Ok((size.abi_align(align), align))
379 }
380 ty::TyDynamic(..) => {
381 let (_, vtable) = self.into_ptr_vtable_pair(value)?;
382 // the second entry in the vtable is the dynamic size of the object.
383 self.read_size_and_align_from_vtable(vtable)
384 }
385
386 ty::TySlice(_) | ty::TyStr => {
387 let (elem_size, align) = layout.field(&self, 0)?.size_and_align();
388 let (_, len) = self.into_slice(value)?;
389 Ok((elem_size * len, align))
390 }
391
392 _ => bug!("size_of_val::<{:?}>", ty),
393 }
394 }
395 }
396
397 pub fn push_stack_frame(
398 &mut self,
399 instance: ty::Instance<'tcx>,
400 span: codemap::Span,
401 mir: &'mir mir::Mir<'tcx>,
402 return_place: Place,
403 return_to_block: StackPopCleanup,
404 ) -> EvalResult<'tcx> {
405 ::log_settings::settings().indentation += 1;
406
407 let locals = if mir.local_decls.len() > 1 {
408 let mut locals = IndexVec::from_elem(Some(Value::Scalar(Scalar::undef())), &mir.local_decls);
409 match self.tcx.describe_def(instance.def_id()) {
410 // statics and constants don't have `Storage*` statements, no need to look for them
411 Some(Def::Static(..)) | Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => {},
412 _ => {
413 trace!("push_stack_frame: {:?}: num_bbs: {}", span, mir.basic_blocks().len());
414 for block in mir.basic_blocks() {
415 for stmt in block.statements.iter() {
416 use rustc::mir::StatementKind::{StorageDead, StorageLive};
417 match stmt.kind {
418 StorageLive(local) |
419 StorageDead(local) => locals[local] = None,
420 _ => {}
421 }
422 }
423 }
424 },
425 }
426 locals
427 } else {
428 // don't allocate at all for trivial constants
429 IndexVec::new()
430 };
431
432 self.stack.push(Frame {
433 mir,
434 block: mir::START_BLOCK,
435 return_to_block,
436 return_place,
437 locals,
438 span,
439 instance,
440 stmt: 0,
441 });
442
443 self.memory.cur_frame = self.cur_frame();
444
445 if self.stack.len() > self.stack_limit {
446 err!(StackFrameLimitReached)
447 } else {
448 Ok(())
449 }
450 }
451
452 pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
453 ::log_settings::settings().indentation -= 1;
454 M::end_region(self, None)?;
455 let frame = self.stack.pop().expect(
456 "tried to pop a stack frame, but there were none",
457 );
458 if !self.stack.is_empty() {
459 // TODO: Is this the correct time to start considering these accesses as originating from the returned-to stack frame?
460 self.memory.cur_frame = self.cur_frame();
461 }
462 match frame.return_to_block {
463 StackPopCleanup::MarkStatic(mutable) => {
464 if let Place::Ptr { ptr, .. } = frame.return_place {
465 // FIXME: to_ptr()? might be too extreme here, static zsts might reach this under certain conditions
466 self.memory.mark_static_initialized(
467 ptr.to_ptr()?.alloc_id,
468 mutable,
469 )?
470 } else {
471 bug!("StackPopCleanup::MarkStatic on: {:?}", frame.return_place);
472 }
473 }
474 StackPopCleanup::Goto(target) => self.goto_block(target),
475 StackPopCleanup::None => {}
476 }
477 // deallocate all locals that are backed by an allocation
478 for local in frame.locals {
479 self.deallocate_local(local)?;
480 }
481
482 Ok(())
483 }
484
485 pub fn deallocate_local(&mut self, local: Option<Value>) -> EvalResult<'tcx> {
486 if let Some(Value::ByRef(ptr, _align)) = local {
487 trace!("deallocating local");
488 let ptr = ptr.to_ptr()?;
489 self.memory.dump_alloc(ptr.alloc_id);
490 self.memory.deallocate_local(ptr)?;
491 };
492 Ok(())
493 }
494
495 /// Evaluate an assignment statement.
496 ///
497 /// There is no separate `eval_rvalue` function. Instead, the code for handling each rvalue
498 /// type writes its results directly into the memory specified by the place.
499 pub(super) fn eval_rvalue_into_place(
500 &mut self,
501 rvalue: &mir::Rvalue<'tcx>,
502 place: &mir::Place<'tcx>,
503 ) -> EvalResult<'tcx> {
504 let dest = self.eval_place(place)?;
505 let dest_ty = self.place_ty(place);
506
507 use rustc::mir::Rvalue::*;
508 match *rvalue {
509 Use(ref operand) => {
510 let value = self.eval_operand(operand)?.value;
511 let valty = ValTy {
512 value,
513 ty: dest_ty,
514 };
515 self.write_value(valty, dest)?;
516 }
517
518 BinaryOp(bin_op, ref left, ref right) => {
519 let left = self.eval_operand(left)?;
520 let right = self.eval_operand(right)?;
521 self.intrinsic_overflowing(
522 bin_op,
523 left,
524 right,
525 dest,
526 dest_ty,
527 )?;
528 }
529
530 CheckedBinaryOp(bin_op, ref left, ref right) => {
531 let left = self.eval_operand(left)?;
532 let right = self.eval_operand(right)?;
533 self.intrinsic_with_overflow(
534 bin_op,
535 left,
536 right,
537 dest,
538 dest_ty,
539 )?;
540 }
541
542 UnaryOp(un_op, ref operand) => {
543 let val = self.eval_operand_to_scalar(operand)?;
544 let val = self.unary_op(un_op, val, dest_ty)?;
545 self.write_scalar(
546 dest,
547 val,
548 dest_ty,
549 )?;
550 }
551
552 Aggregate(ref kind, ref operands) => {
553 self.inc_step_counter_and_check_limit(operands.len());
554
555 let (dest, active_field_index) = match **kind {
556 mir::AggregateKind::Adt(adt_def, variant_index, _, active_field_index) => {
557 self.write_discriminant_value(dest_ty, dest, variant_index)?;
558 if adt_def.is_enum() {
559 (self.place_downcast(dest, variant_index)?, active_field_index)
560 } else {
561 (dest, active_field_index)
562 }
563 }
564 _ => (dest, None)
565 };
566
567 let layout = self.layout_of(dest_ty)?;
568 for (i, operand) in operands.iter().enumerate() {
569 let value = self.eval_operand(operand)?;
570 // Ignore zero-sized fields.
571 if !self.layout_of(value.ty)?.is_zst() {
572 let field_index = active_field_index.unwrap_or(i);
573 let (field_dest, _) = self.place_field(dest, mir::Field::new(field_index), layout)?;
574 self.write_value(value, field_dest)?;
575 }
576 }
577 }
578
579 Repeat(ref operand, _) => {
580 let (elem_ty, length) = match dest_ty.sty {
581 ty::TyArray(elem_ty, n) => (elem_ty, n.unwrap_usize(self.tcx.tcx)),
582 _ => {
583 bug!(
584 "tried to assign array-repeat to non-array type {:?}",
585 dest_ty
586 )
587 }
588 };
589 let elem_size = self.layout_of(elem_ty)?.size;
590 let value = self.eval_operand(operand)?.value;
591
592 let (dest, dest_align) = self.force_allocation(dest)?.to_ptr_align();
593
594 // FIXME: speed up repeat filling
595 for i in 0..length {
596 let elem_dest = dest.ptr_offset(elem_size * i as u64, &self)?;
597 self.write_value_to_ptr(value, elem_dest, dest_align, elem_ty)?;
598 }
599 }
600
601 Len(ref place) => {
602 // FIXME(CTFE): don't allow computing the length of arrays in const eval
603 let src = self.eval_place(place)?;
604 let ty = self.place_ty(place);
605 let (_, len) = src.elem_ty_and_len(ty, self.tcx.tcx);
606 let defined = self.memory.pointer_size().bits() as u8;
607 self.write_scalar(
608 dest,
609 Scalar::Bits {
610 bits: len as u128,
611 defined,
612 },
613 dest_ty,
614 )?;
615 }
616
617 Ref(_, _, ref place) => {
618 let src = self.eval_place(place)?;
619 // We ignore the alignment of the place here -- special handling for packed structs ends
620 // at the `&` operator.
621 let (ptr, _align, extra) = self.force_allocation(src)?.to_ptr_align_extra();
622
623 let val = match extra {
624 PlaceExtra::None => ptr.to_value(),
625 PlaceExtra::Length(len) => ptr.to_value_with_len(len, self.tcx.tcx),
626 PlaceExtra::Vtable(vtable) => ptr.to_value_with_vtable(vtable),
627 PlaceExtra::DowncastVariant(..) => {
628 bug!("attempted to take a reference to an enum downcast place")
629 }
630 };
631 let valty = ValTy {
632 value: val,
633 ty: dest_ty,
634 };
635 self.write_value(valty, dest)?;
636 }
637
638 NullaryOp(mir::NullOp::Box, ty) => {
639 let ty = self.monomorphize(ty, self.substs());
640 M::box_alloc(self, ty, dest)?;
641 }
642
643 NullaryOp(mir::NullOp::SizeOf, ty) => {
644 let ty = self.monomorphize(ty, self.substs());
645 let layout = self.layout_of(ty)?;
646 assert!(!layout.is_unsized(),
647 "SizeOf nullary MIR operator called for unsized type");
648 let defined = self.memory.pointer_size().bits() as u8;
649 self.write_scalar(
650 dest,
651 Scalar::Bits {
652 bits: layout.size.bytes() as u128,
653 defined,
654 },
655 dest_ty,
656 )?;
657 }
658
659 Cast(kind, ref operand, cast_ty) => {
660 debug_assert_eq!(self.monomorphize(cast_ty, self.substs()), dest_ty);
661 use rustc::mir::CastKind::*;
662 match kind {
663 Unsize => {
664 let src = self.eval_operand(operand)?;
665 let src_layout = self.layout_of(src.ty)?;
666 let dst_layout = self.layout_of(dest_ty)?;
667 self.unsize_into(src.value, src_layout, dest, dst_layout)?;
668 }
669
670 Misc => {
671 let src = self.eval_operand(operand)?;
672 if self.type_is_fat_ptr(src.ty) {
673 match (src.value, self.type_is_fat_ptr(dest_ty)) {
674 (Value::ByRef { .. }, _) |
675 // pointers to extern types
676 (Value::Scalar(_),_) |
677 // slices and trait objects to other slices/trait objects
678 (Value::ScalarPair(..), true) => {
679 let valty = ValTy {
680 value: src.value,
681 ty: dest_ty,
682 };
683 self.write_value(valty, dest)?;
684 }
685 // slices and trait objects to thin pointers (dropping the metadata)
686 (Value::ScalarPair(data, _), false) => {
687 let valty = ValTy {
688 value: Value::Scalar(data),
689 ty: dest_ty,
690 };
691 self.write_value(valty, dest)?;
692 }
693 }
694 } else {
695 let src_layout = self.layout_of(src.ty)?;
696 match src_layout.variants {
697 layout::Variants::Single { index } => {
698 if let Some(def) = src.ty.ty_adt_def() {
699 let discr_val = def
700 .discriminant_for_variant(*self.tcx, index)
701 .val;
702 let defined = self
703 .layout_of(dest_ty)
704 .unwrap()
705 .size
706 .bits() as u8;
707 return self.write_scalar(
708 dest,
709 Scalar::Bits {
710 bits: discr_val,
711 defined,
712 },
713 dest_ty);
714 }
715 }
716 layout::Variants::Tagged { .. } |
717 layout::Variants::NicheFilling { .. } => {},
718 }
719
720 let src_val = self.value_to_scalar(src)?;
721 let dest_val = self.cast_scalar(src_val, src.ty, dest_ty)?;
722 let valty = ValTy {
723 value: Value::Scalar(dest_val),
724 ty: dest_ty,
725 };
726 self.write_value(valty, dest)?;
727 }
728 }
729
730 ReifyFnPointer => {
731 match self.eval_operand(operand)?.ty.sty {
732 ty::TyFnDef(def_id, substs) => {
733 if self.tcx.has_attr(def_id, "rustc_args_required_const") {
734 bug!("reifying a fn ptr that requires \
735 const arguments");
736 }
737 let instance: EvalResult<'tcx, _> = ty::Instance::resolve(
738 *self.tcx,
739 self.param_env,
740 def_id,
741 substs,
742 ).ok_or_else(|| EvalErrorKind::TypeckError.into());
743 let fn_ptr = self.memory.create_fn_alloc(instance?);
744 let valty = ValTy {
745 value: Value::Scalar(fn_ptr.into()),
746 ty: dest_ty,
747 };
748 self.write_value(valty, dest)?;
749 }
750 ref other => bug!("reify fn pointer on {:?}", other),
751 }
752 }
753
754 UnsafeFnPointer => {
755 match dest_ty.sty {
756 ty::TyFnPtr(_) => {
757 let mut src = self.eval_operand(operand)?;
758 src.ty = dest_ty;
759 self.write_value(src, dest)?;
760 }
761 ref other => bug!("fn to unsafe fn cast on {:?}", other),
762 }
763 }
764
765 ClosureFnPointer => {
766 match self.eval_operand(operand)?.ty.sty {
767 ty::TyClosure(def_id, substs) => {
768 let substs = self.tcx.subst_and_normalize_erasing_regions(
769 self.substs(),
770 ty::ParamEnv::reveal_all(),
771 &substs,
772 );
773 let instance = ty::Instance::resolve_closure(
774 *self.tcx,
775 def_id,
776 substs,
777 ty::ClosureKind::FnOnce,
778 );
779 let fn_ptr = self.memory.create_fn_alloc(instance);
780 let valty = ValTy {
781 value: Value::Scalar(fn_ptr.into()),
782 ty: dest_ty,
783 };
784 self.write_value(valty, dest)?;
785 }
786 ref other => bug!("closure fn pointer on {:?}", other),
787 }
788 }
789 }
790 }
791
792 Discriminant(ref place) => {
793 let ty = self.place_ty(place);
794 let place = self.eval_place(place)?;
795 let discr_val = self.read_discriminant_value(place, ty)?;
796 let defined = self.layout_of(dest_ty).unwrap().size.bits() as u8;
797 self.write_scalar(dest, Scalar::Bits {
798 bits: discr_val,
799 defined,
800 }, dest_ty)?;
801 }
802 }
803
804 self.dump_local(dest);
805
806 Ok(())
807 }
808
809 pub(super) fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
810 match ty.sty {
811 ty::TyRawPtr(ty::TypeAndMut { ty, .. }) |
812 ty::TyRef(_, ty, _) => !self.type_is_sized(ty),
813 ty::TyAdt(def, _) if def.is_box() => !self.type_is_sized(ty.boxed_ty()),
814 _ => false,
815 }
816 }
817
818 pub(super) fn eval_operand_to_scalar(
819 &mut self,
820 op: &mir::Operand<'tcx>,
821 ) -> EvalResult<'tcx, Scalar> {
822 let valty = self.eval_operand(op)?;
823 self.value_to_scalar(valty)
824 }
825
826 pub(crate) fn operands_to_args(
827 &mut self,
828 ops: &[mir::Operand<'tcx>],
829 ) -> EvalResult<'tcx, Vec<ValTy<'tcx>>> {
830 ops.into_iter()
831 .map(|op| self.eval_operand(op))
832 .collect()
833 }
834
835 pub fn eval_operand(&mut self, op: &mir::Operand<'tcx>) -> EvalResult<'tcx, ValTy<'tcx>> {
836 use rustc::mir::Operand::*;
837 let ty = self.monomorphize(op.ty(self.mir(), *self.tcx), self.substs());
838 match *op {
839 // FIXME: do some more logic on `move` to invalidate the old location
840 Copy(ref place) |
841 Move(ref place) => {
842 Ok(ValTy {
843 value: self.eval_and_read_place(place)?,
844 ty
845 })
846 },
847
848 Constant(ref constant) => {
849 use rustc::mir::Literal;
850 let mir::Constant { ref literal, .. } = **constant;
851 let value = match *literal {
852 Literal::Value { ref value } => self.const_to_value(&value.val, ty)?,
853
854 Literal::Promoted { index } => {
855 let instance = self.frame().instance;
856 self.read_global_as_value(GlobalId {
857 instance,
858 promoted: Some(index),
859 }, ty)?
860 }
861 };
862
863 Ok(ValTy {
864 value,
865 ty,
866 })
867 }
868 }
869 }
870
871 /// reads a tag and produces the corresponding variant index
872 pub fn read_discriminant_as_variant_index(
873 &mut self,
874 place: Place,
875 ty: Ty<'tcx>,
876 ) -> EvalResult<'tcx, usize> {
877 let layout = self.layout_of(ty)?;
878 match layout.variants {
879 ty::layout::Variants::Single { index } => Ok(index),
880 ty::layout::Variants::Tagged { .. } => {
881 let discr_val = self.read_discriminant_value(place, ty)?;
882 ty
883 .ty_adt_def()
884 .expect("tagged layout for non adt")
885 .discriminants(self.tcx.tcx)
886 .position(|var| var.val == discr_val)
887 .ok_or_else(|| EvalErrorKind::InvalidDiscriminant.into())
888 }
889 ty::layout::Variants::NicheFilling { .. } => {
890 let discr_val = self.read_discriminant_value(place, ty)?;
891 assert_eq!(discr_val as usize as u128, discr_val);
892 Ok(discr_val as usize)
893 },
894 }
895 }
896
897 pub fn read_discriminant_value(
898 &mut self,
899 place: Place,
900 ty: Ty<'tcx>,
901 ) -> EvalResult<'tcx, u128> {
902 let layout = self.layout_of(ty)?;
903 trace!("read_discriminant_value {:#?}", layout);
904 if layout.abi == layout::Abi::Uninhabited {
905 return Ok(0);
906 }
907
908 match layout.variants {
909 layout::Variants::Single { index } => {
910 let discr_val = ty.ty_adt_def().map_or(
911 index as u128,
912 |def| def.discriminant_for_variant(*self.tcx, index).val);
913 return Ok(discr_val);
914 }
915 layout::Variants::Tagged { .. } |
916 layout::Variants::NicheFilling { .. } => {},
917 }
918
919 let (discr_place, discr) = self.place_field(place, mir::Field::new(0), layout)?;
920 trace!("discr place: {:?}, {:?}", discr_place, discr);
921 let raw_discr = self.value_to_scalar(ValTy {
922 value: self.read_place(discr_place)?,
923 ty: discr.ty
924 })?;
925 let discr_val = match layout.variants {
926 layout::Variants::Single { .. } => bug!(),
927 // FIXME: should we catch invalid discriminants here?
928 layout::Variants::Tagged { .. } => {
929 if discr.ty.is_signed() {
930 let i = raw_discr.to_bits(discr.size)? as i128;
931 // going from layout tag type to typeck discriminant type
932 // requires first sign extending at the layout tag type's width
933 let shift = 128 - discr.size.bits();
934 let sexted = (i << shift) >> shift;
935 // and then truncating to the typeck discriminant type's width (zeroing the high bits)
936 let discr_ty = ty
937 .ty_adt_def().expect("tagged layout corresponds to adt")
938 .repr
939 .discr_type();
940 let discr_ty = layout::Integer::from_attr(self.tcx.tcx, discr_ty);
941 let shift = 128 - discr_ty.size().bits();
942 let truncatee = sexted as u128;
943 (truncatee << shift) >> shift
944 } else {
945 raw_discr.to_bits(discr.size)?
946 }
947 },
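// Worked example for the signed case above (illustrative, assuming a 64-bit
// target): for `enum E { A = -1, B = 0 }` the layout tag is 8 bits wide while
// the typeck discriminant type is `isize`. Reading variant `A` yields the raw
// tag `0xff`; sign extending at 8 bits gives `-1_i128`, and truncating to the
// 64-bit discriminant type produces `0xffff_ffff_ffff_ffff` as `u128`, the bit
// pattern of `-1_isize`.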
948 layout::Variants::NicheFilling {
949 dataful_variant,
950 ref niche_variants,
951 niche_start,
952 ..
953 } => {
954 let variants_start = *niche_variants.start() as u128;
955 let variants_end = *niche_variants.end() as u128;
956 match raw_discr {
957 Scalar::Ptr(_) => {
958 assert!(niche_start == 0);
959 assert!(variants_start == variants_end);
960 dataful_variant as u128
961 },
962 Scalar::Bits { bits: raw_discr, defined } => {
963 if defined < discr.size.bits() as u8 {
964 return err!(ReadUndefBytes);
965 }
966 let discr = raw_discr.wrapping_sub(niche_start)
967 .wrapping_add(variants_start);
968 if variants_start <= discr && discr <= variants_end {
969 discr
970 } else {
971 dataful_variant as u128
972 }
973 },
974 }
975 }
976 };
977
978 Ok(discr_val)
979 }
980
981
982 pub fn write_discriminant_value(
983 &mut self,
984 dest_ty: Ty<'tcx>,
985 dest: Place,
986 variant_index: usize,
987 ) -> EvalResult<'tcx> {
988 let layout = self.layout_of(dest_ty)?;
989
990 match layout.variants {
991 layout::Variants::Single { index } => {
992 if index != variant_index {
993 // If the layout of an enum is `Single`, all
994 // other variants are necessarily uninhabited.
995 assert_eq!(layout.for_variant(&self, variant_index).abi,
996 layout::Abi::Uninhabited);
997 }
998 }
999 layout::Variants::Tagged { ref tag, .. } => {
1000 let discr_val = dest_ty.ty_adt_def().unwrap()
1001 .discriminant_for_variant(*self.tcx, variant_index)
1002 .val;
1003
1004 // raw discriminants for enums are isize or bigger during
1005 // their computation, but the in-memory tag is the smallest possible
1006 // representation
1007 let size = tag.value.size(self.tcx.tcx).bits();
1008 let shift = 128 - size;
1009 let discr_val = (discr_val << shift) >> shift;
1010
1011 let (discr_dest, tag) = self.place_field(dest, mir::Field::new(0), layout)?;
1012 self.write_scalar(discr_dest, Scalar::Bits {
1013 bits: discr_val,
1014 defined: size as u8,
1015 }, tag.ty)?;
1016 }
1017 layout::Variants::NicheFilling {
1018 dataful_variant,
1019 ref niche_variants,
1020 niche_start,
1021 ..
1022 } => {
1023 if variant_index != dataful_variant {
1024 let (niche_dest, niche) =
1025 self.place_field(dest, mir::Field::new(0), layout)?;
1026 let niche_value = ((variant_index - niche_variants.start()) as u128)
1027 .wrapping_add(niche_start);
1028 self.write_scalar(niche_dest, Scalar::Bits {
1029 bits: niche_value,
1030 defined: niche.size.bits() as u8,
1031 }, niche.ty)?;
1032 }
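// Illustrative example, not from the original source: for `Option<&T>`,
// `dataful_variant` is `Some` (index 1), `niche_variants` is `0..=0` and
// `niche_start` is 0, so writing the `None` variant stores
// `(0 - 0).wrapping_add(0) = 0`, i.e. the null pointer, into the pointer field.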
1033 }
1034 }
1035
1036 Ok(())
1037 }
1038
1039 pub fn read_global_as_value(&mut self, gid: GlobalId<'tcx>, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
1040 if self.tcx.is_static(gid.instance.def_id()).is_some() {
1041 let alloc_id = self
1042 .tcx
1043 .alloc_map
1044 .lock()
1045 .intern_static(gid.instance.def_id());
1046 let layout = self.layout_of(ty)?;
1047 return Ok(Value::ByRef(Scalar::Ptr(alloc_id.into()), layout.align))
1048 }
1049 let cv = self.const_eval(gid)?;
1050 self.const_to_value(&cv.val, ty)
1051 }
1052
1053 pub fn const_eval(&self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, &'tcx ty::Const<'tcx>> {
1054 let param_env = if self.tcx.is_static(gid.instance.def_id()).is_some() {
1055 ty::ParamEnv::reveal_all()
1056 } else {
1057 self.param_env
1058 };
1059 self.tcx.const_eval(param_env.and(gid)).map_err(|err| EvalErrorKind::ReferencedConstant(err).into())
1060 }
1061
1062 pub fn force_allocation(&mut self, place: Place) -> EvalResult<'tcx, Place> {
1063 let new_place = match place {
1064 Place::Local { frame, local } => {
1065 match self.stack[frame].locals[local] {
1066 None => return err!(DeadLocal),
1067 Some(Value::ByRef(ptr, align)) => {
1068 Place::Ptr {
1069 ptr,
1070 align,
1071 extra: PlaceExtra::None,
1072 }
1073 }
1074 Some(val) => {
1075 let ty = self.stack[frame].mir.local_decls[local].ty;
1076 let ty = self.monomorphize(ty, self.stack[frame].instance.substs);
1077 let layout = self.layout_of(ty)?;
1078 let ptr = self.alloc_ptr(ty)?;
1079 self.stack[frame].locals[local] =
1080 Some(Value::ByRef(ptr.into(), layout.align)); // it stays live
1081 let place = Place::from_ptr(ptr, layout.align);
1082 self.write_value(ValTy { value: val, ty }, place)?;
1083 place
1084 }
1085 }
1086 }
1087 Place::Ptr { .. } => place,
1088 };
1089 Ok(new_place)
1090 }
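// In short (an editorial note, not original): `force_allocation` guarantees the
// result is a `Place::Ptr`, spilling a by-value local into a fresh stack allocation
// if needed so that callers such as the `Ref` and `Repeat` rvalue handlers above can
// take its address.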
1091
1092 /// ensures this Value is not a ByRef
1093 pub fn follow_by_ref_value(
1094 &self,
1095 value: Value,
1096 ty: Ty<'tcx>,
1097 ) -> EvalResult<'tcx, Value> {
1098 match value {
1099 Value::ByRef(ptr, align) => {
1100 self.read_value(ptr, align, ty)
1101 }
1102 other => Ok(other),
1103 }
1104 }
1105
1106 pub fn value_to_scalar(
1107 &self,
1108 ValTy { value, ty } : ValTy<'tcx>,
1109 ) -> EvalResult<'tcx, Scalar> {
1110 match self.follow_by_ref_value(value, ty)? {
1111 Value::ByRef { .. } => bug!("follow_by_ref_value can't result in `ByRef`"),
1112
1113 Value::Scalar(scalar) => {
1114 // TODO: Do we really want insta-UB here?
1115 self.ensure_valid_value(scalar, ty)?;
1116 Ok(scalar)
1117 }
1118
1119 Value::ScalarPair(..) => bug!("value_to_scalar can't work with fat pointers"),
1120 }
1121 }
1122
1123 pub fn write_ptr(&mut self, dest: Place, val: Scalar, dest_ty: Ty<'tcx>) -> EvalResult<'tcx> {
1124 let valty = ValTy {
1125 value: val.to_value(),
1126 ty: dest_ty,
1127 };
1128 self.write_value(valty, dest)
1129 }
1130
1131 pub fn write_scalar(
1132 &mut self,
1133 dest: Place,
1134 val: Scalar,
1135 dest_ty: Ty<'tcx>,
1136 ) -> EvalResult<'tcx> {
1137 let valty = ValTy {
1138 value: Value::Scalar(val),
1139 ty: dest_ty,
1140 };
1141 self.write_value(valty, dest)
1142 }
1143
1144 pub fn write_value(
1145 &mut self,
1146 ValTy { value: src_val, ty: dest_ty } : ValTy<'tcx>,
1147 dest: Place,
1148 ) -> EvalResult<'tcx> {
1149 //trace!("Writing {:?} to {:?} at type {:?}", src_val, dest, dest_ty);
1150 // Note that it is really important that the type here is the right one, and matches the type things are read at.
1151 // In case `src_val` is a `ScalarPair`, we don't do any magic here to handle padding properly, which is only
1152 // correct if we never look at this data with the wrong type.
1153
1154 match dest {
1155 Place::Ptr { ptr, align, extra } => {
1156 assert_eq!(extra, PlaceExtra::None);
1157 self.write_value_to_ptr(src_val, ptr, align, dest_ty)
1158 }
1159
1160 Place::Local { frame, local } => {
1161 let dest = self.stack[frame].get_local(local)?;
1162 self.write_value_possibly_by_val(
1163 src_val,
1164 |this, val| this.stack[frame].set_local(local, val),
1165 dest,
1166 dest_ty,
1167 )
1168 }
1169 }
1170 }
1171
1172 // The cases here can be a bit subtle. Read carefully!
1173 fn write_value_possibly_by_val<F: FnOnce(&mut Self, Value) -> EvalResult<'tcx>>(
1174 &mut self,
1175 src_val: Value,
1176 write_dest: F,
1177 old_dest_val: Value,
1178 dest_ty: Ty<'tcx>,
1179 ) -> EvalResult<'tcx> {
1180 if let Value::ByRef(dest_ptr, align) = old_dest_val {
1181 // If the value is already `ByRef` (that is, backed by an `Allocation`),
1182 // then we must write the new value into this allocation, because there may be
1183 // other pointers into the allocation. These other pointers are logically
1184 // pointers into the local variable, and must be able to observe the change.
1185 //
1186 // Thus, it would be an error to replace the `ByRef` with a `ByVal`, unless we
1187 // knew for certain that there were no outstanding pointers to this allocation.
1188 self.write_value_to_ptr(src_val, dest_ptr, align, dest_ty)?;
1189 } else if let Value::ByRef(src_ptr, align) = src_val {
1190 // If the value is not `ByRef`, then we know there are no pointers to it
1191 // and we can simply overwrite the `Value` in the locals array directly.
1192 //
1193 // In this specific case, where the source value is `ByRef`, we must duplicate
1194 // the allocation, because this is a by-value operation. It would be incorrect
1195 // if they referred to the same allocation, since then a change to one would
1196 // implicitly change the other.
1197 //
1198 // It is a valid optimization to attempt reading a primitive value out of the
1199 // source and write that into the destination without making an allocation, so
1200 // we do so here.
1201 if let Ok(Some(src_val)) = self.try_read_value(src_ptr, align, dest_ty) {
1202 write_dest(self, src_val)?;
1203 } else {
1204 let dest_ptr = self.alloc_ptr(dest_ty)?.into();
1205 let layout = self.layout_of(dest_ty)?;
1206 self.memory.copy(src_ptr, align.min(layout.align), dest_ptr, layout.align, layout.size, false)?;
1207 write_dest(self, Value::ByRef(dest_ptr, layout.align))?;
1208 }
1209 } else {
1210 // Finally, we have the simple case where neither source nor destination is
1211 // `ByRef`. We may simply copy the source value over the destination.
1212 write_dest(self, src_val)?;
1213 }
1214 Ok(())
1215 }
1216
1217 pub fn write_value_to_ptr(
1218 &mut self,
1219 value: Value,
1220 dest: Scalar,
1221 dest_align: Align,
1222 dest_ty: Ty<'tcx>,
1223 ) -> EvalResult<'tcx> {
1224 let layout = self.layout_of(dest_ty)?;
1225 trace!("write_value_to_ptr: {:#?}, {}, {:#?}", value, dest_ty, layout);
1226 match value {
1227 Value::ByRef(ptr, align) => {
1228 self.memory.copy(ptr, align.min(layout.align), dest, dest_align.min(layout.align), layout.size, false)
1229 }
1230 Value::Scalar(scalar) => {
1231 let signed = match layout.abi {
1232 layout::Abi::Scalar(ref scal) => match scal.value {
1233 layout::Primitive::Int(_, signed) => signed,
1234 _ => false,
1235 },
1236 _ => match scalar {
1237 Scalar::Bits { defined: 0, .. } => false,
1238 _ => bug!("write_value_to_ptr: invalid ByVal layout: {:#?}", layout),
1239 }
1240 };
1241 self.memory.write_scalar(dest, dest_align, scalar, layout.size, signed)
1242 }
1243 Value::ScalarPair(a_val, b_val) => {
1244 trace!("write_value_to_ptr valpair: {:#?}", layout);
1245 let (a, b) = match layout.abi {
1246 layout::Abi::ScalarPair(ref a, ref b) => (&a.value, &b.value),
1247 _ => bug!("write_value_to_ptr: invalid ScalarPair layout: {:#?}", layout)
1248 };
1249 let (a_size, b_size) = (a.size(&self), b.size(&self));
1250 let a_ptr = dest;
1251 let b_offset = a_size.abi_align(b.align(&self));
1252 let b_ptr = dest.ptr_offset(b_offset, &self)?.into();
1253 // TODO: What about signedness?
1254 self.memory.write_scalar(a_ptr, dest_align, a_val, a_size, false)?;
1255 self.memory.write_scalar(b_ptr, dest_align, b_val, b_size, false)
1256 }
1257 }
1258 }
1259
1260 fn ensure_valid_value(&self, val: Scalar, ty: Ty<'tcx>) -> EvalResult<'tcx> {
1261 match ty.sty {
1262 ty::TyBool => val.to_bool().map(|_| ()),
1263
1264 ty::TyChar if ::std::char::from_u32(val.to_bits(Size::from_bytes(4))? as u32).is_none() => {
1265 err!(InvalidChar(val.to_bits(Size::from_bytes(4))? as u32 as u128))
1266 }
1267
1268 _ => Ok(()),
1269 }
1270 }
1271
1272 pub fn read_value(&self, ptr: Scalar, align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
1273 if let Some(val) = self.try_read_value(ptr, align, ty)? {
1274 Ok(val)
1275 } else {
1276 bug!("primitive read failed for type: {:?}", ty);
1277 }
1278 }
1279
1280 pub(crate) fn read_ptr(
1281 &self,
1282 ptr: Pointer,
1283 ptr_align: Align,
1284 pointee_ty: Ty<'tcx>,
1285 ) -> EvalResult<'tcx, Value> {
1286 let ptr_size = self.memory.pointer_size();
1287 let p: Scalar = self.memory.read_ptr_sized(ptr, ptr_align)?.into();
1288 if self.type_is_sized(pointee_ty) {
1289 Ok(p.to_value())
1290 } else {
1291 trace!("reading fat pointer extra of type {}", pointee_ty);
1292 let extra = ptr.offset(ptr_size, self)?;
1293 match self.tcx.struct_tail(pointee_ty).sty {
1294 ty::TyDynamic(..) => Ok(p.to_value_with_vtable(
1295 self.memory.read_ptr_sized(extra, ptr_align)?.to_ptr()?,
1296 )),
1297 ty::TySlice(..) | ty::TyStr => {
1298 let len = self
1299 .memory
1300 .read_ptr_sized(extra, ptr_align)?
1301 .to_bits(ptr_size)?;
1302 Ok(p.to_value_with_len(len as u64, self.tcx.tcx))
1303 },
1304 _ => bug!("unsized scalar ptr read from {:?}", pointee_ty),
1305 }
1306 }
1307 }
1308
1309 pub fn validate_ptr_target(
1310 &self,
1311 ptr: Pointer,
1312 ptr_align: Align,
1313 ty: Ty<'tcx>
1314 ) -> EvalResult<'tcx> {
1315 match ty.sty {
1316 ty::TyBool => {
1317 self.memory.read_scalar(ptr, ptr_align, Size::from_bytes(1))?.to_bool()?;
1318 }
1319 ty::TyChar => {
1320 let c = self.memory.read_scalar(ptr, ptr_align, Size::from_bytes(4))?.to_bits(Size::from_bytes(4))? as u32;
1321 match ::std::char::from_u32(c) {
1322 Some(..) => (),
1323 None => return err!(InvalidChar(c as u128)),
1324 }
1325 }
1326
1327 ty::TyFnPtr(_) => {
1328 self.memory.read_ptr_sized(ptr, ptr_align)?;
1329 },
1330 ty::TyRef(_, rty, _) |
1331 ty::TyRawPtr(ty::TypeAndMut { ty: rty, .. }) => {
1332 self.read_ptr(ptr, ptr_align, rty)?;
1333 }
1334
1335 ty::TyAdt(def, _) => {
1336 if def.is_box() {
1337 self.read_ptr(ptr, ptr_align, ty.boxed_ty())?;
1338 return Ok(());
1339 }
1340
1341 if let layout::Abi::Scalar(ref scalar) = self.layout_of(ty)?.abi {
1342 let size = scalar.value.size(self);
1343 self.memory.read_scalar(ptr, ptr_align, size)?;
1344 }
1345 }
1346
1347 _ => (),
1348 }
1349 Ok(())
1350 }
1351
1352 pub fn try_read_by_ref(&self, mut val: Value, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
1353 // Convert to ByVal or ScalarPair if possible
1354 if let Value::ByRef(ptr, align) = val {
1355 if let Some(read_val) = self.try_read_value(ptr, align, ty)? {
1356 val = read_val;
1357 }
1358 }
1359 Ok(val)
1360 }
1361
1362 pub fn try_read_value(&self, ptr: Scalar, ptr_align: Align, ty: Ty<'tcx>) -> EvalResult<'tcx, Option<Value>> {
1363 let layout = self.layout_of(ty)?;
1364 self.memory.check_align(ptr, ptr_align)?;
1365
1366 if layout.size.bytes() == 0 {
1367 return Ok(Some(Value::Scalar(Scalar::undef())));
1368 }
1369
1370 let ptr = ptr.to_ptr()?;
1371
1372 // Not the right place to do this
1373 //self.validate_ptr_target(ptr, ptr_align, ty)?;
1374
1375 match layout.abi {
1376 layout::Abi::Scalar(..) => {
1377 let scalar = self.memory.read_scalar(ptr, ptr_align, layout.size)?;
1378 Ok(Some(Value::Scalar(scalar)))
1379 }
1380 layout::Abi::ScalarPair(ref a, ref b) => {
1381 let (a, b) = (&a.value, &b.value);
1382 let (a_size, b_size) = (a.size(self), b.size(self));
1383 let a_ptr = ptr;
1384 let b_offset = a_size.abi_align(b.align(self));
1385 let b_ptr = ptr.offset(b_offset, self)?.into();
1386 let a_val = self.memory.read_scalar(a_ptr, ptr_align, a_size)?;
1387 let b_val = self.memory.read_scalar(b_ptr, ptr_align, b_size)?;
1388 Ok(Some(Value::ScalarPair(a_val, b_val)))
1389 }
1390 _ => Ok(None),
1391 }
1392 }
1393
1394 pub fn frame(&self) -> &Frame<'mir, 'tcx> {
1395 self.stack.last().expect("no call frames exist")
1396 }
1397
1398 pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx> {
1399 self.stack.last_mut().expect("no call frames exist")
1400 }
1401
1402 pub(super) fn mir(&self) -> &'mir mir::Mir<'tcx> {
1403 self.frame().mir
1404 }
1405
1406 pub fn substs(&self) -> &'tcx Substs<'tcx> {
1407 if let Some(frame) = self.stack.last() {
1408 frame.instance.substs
1409 } else {
1410 Substs::empty()
1411 }
1412 }
1413
1414 fn unsize_into_ptr(
1415 &mut self,
1416 src: Value,
1417 src_ty: Ty<'tcx>,
1418 dest: Place,
1419 dest_ty: Ty<'tcx>,
1420 sty: Ty<'tcx>,
1421 dty: Ty<'tcx>,
1422 ) -> EvalResult<'tcx> {
1423 // A<Struct> -> A<Trait> conversion
1424 let (src_pointee_ty, dest_pointee_ty) = self.tcx.struct_lockstep_tails(sty, dty);
1425
1426 match (&src_pointee_ty.sty, &dest_pointee_ty.sty) {
1427 (&ty::TyArray(_, length), &ty::TySlice(_)) => {
1428 let ptr = self.into_ptr(src)?;
1429 // u64 cast is from usize to u64, which is always good
1430 let valty = ValTy {
1431 value: ptr.to_value_with_len(length.unwrap_usize(self.tcx.tcx), self.tcx.tcx),
1432 ty: dest_ty,
1433 };
1434 self.write_value(valty, dest)
1435 }
1436 (&ty::TyDynamic(..), &ty::TyDynamic(..)) => {
1437 // For now, upcasts are limited to changes in marker
1438 // traits, and hence never actually require a change
1439 // to the vtable.
1440 let valty = ValTy {
1441 value: src,
1442 ty: dest_ty,
1443 };
1444 self.write_value(valty, dest)
1445 }
1446 (_, &ty::TyDynamic(ref data, _)) => {
1447 let trait_ref = data.principal().unwrap().with_self_ty(
1448 *self.tcx,
1449 src_pointee_ty,
1450 );
1451 let trait_ref = self.tcx.erase_regions(&trait_ref);
1452 let vtable = self.get_vtable(src_pointee_ty, trait_ref)?;
1453 let ptr = self.into_ptr(src)?;
1454 let valty = ValTy {
1455 value: ptr.to_value_with_vtable(vtable),
1456 ty: dest_ty,
1457 };
1458 self.write_value(valty, dest)
1459 }
1460
1461 _ => bug!("invalid unsizing {:?} -> {:?}", src_ty, dest_ty),
1462 }
1463 }
1464
1465 fn unsize_into(
1466 &mut self,
1467 src: Value,
1468 src_layout: TyLayout<'tcx>,
1469 dst: Place,
1470 dst_layout: TyLayout<'tcx>,
1471 ) -> EvalResult<'tcx> {
1472 match (&src_layout.ty.sty, &dst_layout.ty.sty) {
1473 (&ty::TyRef(_, s, _), &ty::TyRef(_, d, _)) |
1474 (&ty::TyRef(_, s, _), &ty::TyRawPtr(TypeAndMut { ty: d, .. })) |
1475 (&ty::TyRawPtr(TypeAndMut { ty: s, .. }),
1476 &ty::TyRawPtr(TypeAndMut { ty: d, .. })) => {
1477 self.unsize_into_ptr(src, src_layout.ty, dst, dst_layout.ty, s, d)
1478 }
1479 (&ty::TyAdt(def_a, _), &ty::TyAdt(def_b, _)) => {
1480 assert_eq!(def_a, def_b);
1481 if def_a.is_box() || def_b.is_box() {
1482 if !def_a.is_box() || !def_b.is_box() {
1483 bug!("invalid unsizing between {:?} -> {:?}", src_layout, dst_layout);
1484 }
1485 return self.unsize_into_ptr(
1486 src,
1487 src_layout.ty,
1488 dst,
1489 dst_layout.ty,
1490 src_layout.ty.boxed_ty(),
1491 dst_layout.ty.boxed_ty(),
1492 );
1493 }
1494
1495 // unsizing of generic struct with pointer fields
1496 // Example: `Arc<T>` -> `Arc<Trait>`
1497 // here we need to increase the size of every &T thin ptr field to a fat ptr
1498 for i in 0..src_layout.fields.count() {
1499 let (dst_f_place, dst_field) =
1500 self.place_field(dst, mir::Field::new(i), dst_layout)?;
1501 if dst_field.is_zst() {
1502 continue;
1503 }
1504 let (src_f_value, src_field) = match src {
1505 Value::ByRef(ptr, align) => {
1506 let src_place = Place::from_scalar_ptr(ptr, align);
1507 let (src_f_place, src_field) =
1508 self.place_field(src_place, mir::Field::new(i), src_layout)?;
1509 (self.read_place(src_f_place)?, src_field)
1510 }
1511 Value::Scalar(_) | Value::ScalarPair(..) => {
1512 let src_field = src_layout.field(&self, i)?;
1513 assert_eq!(src_layout.fields.offset(i).bytes(), 0);
1514 assert_eq!(src_field.size, src_layout.size);
1515 (src, src_field)
1516 }
1517 };
1518 if src_field.ty == dst_field.ty {
1519 self.write_value(ValTy {
1520 value: src_f_value,
1521 ty: src_field.ty,
1522 }, dst_f_place)?;
1523 } else {
1524 self.unsize_into(src_f_value, src_field, dst_f_place, dst_field)?;
1525 }
1526 }
1527 Ok(())
1528 }
1529 _ => {
1530 bug!(
1531 "unsize_into: invalid conversion: {:?} -> {:?}",
1532 src_layout,
1533 dst_layout
1534 )
1535 }
1536 }
1537 }
1538
1539 pub fn dump_local(&self, place: Place) {
1540 // Debug output
1541 if !log_enabled!(::log::Level::Trace) {
1542 return;
1543 }
1544 match place {
1545 Place::Local { frame, local } => {
1546 let mut allocs = Vec::new();
1547 let mut msg = format!("{:?}", local);
1548 if frame != self.cur_frame() {
1549 write!(msg, " ({} frames up)", self.cur_frame() - frame).unwrap();
1550 }
1551 write!(msg, ":").unwrap();
1552
1553 match self.stack[frame].get_local(local) {
1554 Err(err) => {
1555 if let EvalErrorKind::DeadLocal = err.kind {
1556 write!(msg, " is dead").unwrap();
1557 } else {
1558 panic!("Failed to access local: {:?}", err);
1559 }
1560 }
1561 Ok(Value::ByRef(ptr, align)) => {
1562 match ptr {
1563 Scalar::Ptr(ptr) => {
1564 write!(msg, " by align({}) ref:", align.abi()).unwrap();
1565 allocs.push(ptr.alloc_id);
1566 }
1567 ptr => write!(msg, " integral by ref: {:?}", ptr).unwrap(),
1568 }
1569 }
1570 Ok(Value::Scalar(val)) => {
1571 write!(msg, " {:?}", val).unwrap();
1572 if let Scalar::Ptr(ptr) = val {
1573 allocs.push(ptr.alloc_id);
1574 }
1575 }
1576 Ok(Value::ScalarPair(val1, val2)) => {
1577 write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
1578 if let Scalar::Ptr(ptr) = val1 {
1579 allocs.push(ptr.alloc_id);
1580 }
1581 if let Scalar::Ptr(ptr) = val2 {
1582 allocs.push(ptr.alloc_id);
1583 }
1584 }
1585 }
1586
1587 trace!("{}", msg);
1588 self.memory.dump_allocs(allocs);
1589 }
1590 Place::Ptr { ptr, align, .. } => {
1591 match ptr {
1592 Scalar::Ptr(ptr) => {
1593 trace!("by align({}) ref:", align.abi());
1594 self.memory.dump_alloc(ptr.alloc_id);
1595 }
1596 ptr => trace!(" integral by ref: {:?}", ptr),
1597 }
1598 }
1599 }
1600 }
1601
1602 /// Convenience function to ensure correct usage of locals
1603 pub fn modify_local<F>(&mut self, frame: usize, local: mir::Local, f: F) -> EvalResult<'tcx>
1604 where
1605 F: FnOnce(&mut Self, Value) -> EvalResult<'tcx, Value>,
1606 {
1607 let val = self.stack[frame].get_local(local)?;
1608 let new_val = f(self, val)?;
1609 self.stack[frame].set_local(local, new_val)?;
1610 // FIXME(solson): Run this when setting to Undef? (See previous version of this code.)
1611 // if let Value::ByRef(ptr) = self.stack[frame].get_local(local) {
1612 // self.memory.deallocate(ptr)?;
1613 // }
1614 Ok(())
1615 }
1616
1617 pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> (Vec<FrameInfo>, Span) {
1618 let mut last_span = None;
1619 let mut frames = Vec::new();
1620 // skip 1 because the last frame is just the environment of the constant
1621 for &Frame { instance, span, mir, block, stmt, .. } in self.stack().iter().skip(1).rev() {
1622 // make sure we don't emit frames that are duplicates of the previous
1623 if explicit_span == Some(span) {
1624 last_span = Some(span);
1625 continue;
1626 }
1627 if let Some(last) = last_span {
1628 if last == span {
1629 continue;
1630 }
1631 } else {
1632 last_span = Some(span);
1633 }
1634 let location = if self.tcx.def_key(instance.def_id()).disambiguated_data.data == DefPathData::ClosureExpr {
1635 "closure".to_owned()
1636 } else {
1637 instance.to_string()
1638 };
1639 let block = &mir.basic_blocks()[block];
1640 let source_info = if stmt < block.statements.len() {
1641 block.statements[stmt].source_info
1642 } else {
1643 block.terminator().source_info
1644 };
1645 let lint_root = match mir.source_scope_local_data {
1646 mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root),
1647 mir::ClearCrossCrate::Clear => None,
1648 };
1649 frames.push(FrameInfo { span, location, lint_root });
1650 }
1651 trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);
1652 (frames, self.tcx.span)
1653 }
1654
1655 pub fn sign_extend(&self, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> {
1656 super::sign_extend(self.tcx.tcx, value, ty)
1657 }
1658
1659 pub fn truncate(&self, value: u128, ty: Ty<'tcx>) -> EvalResult<'tcx, u128> {
1660 super::truncate(self.tcx.tcx, value, ty)
1661 }
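// Illustrative semantics of the two helpers above (an assumption about
// `super::sign_extend`/`super::truncate`, not verified here): sign extending the
// 8-bit value `0xff` as `i8` yields the 128-bit pattern of -1 (`u128::MAX`), while
// truncating `0x1ff` to `u8` masks it down to `0xff`.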
1662 }
1663
1664 impl<'mir, 'tcx> Frame<'mir, 'tcx> {
1665 pub fn get_local(&self, local: mir::Local) -> EvalResult<'tcx, Value> {
1666 self.locals[local].ok_or_else(|| EvalErrorKind::DeadLocal.into())
1667 }
1668
1669 fn set_local(&mut self, local: mir::Local, value: Value) -> EvalResult<'tcx> {
1670 match self.locals[local] {
1671 None => err!(DeadLocal),
1672 Some(ref mut local) => {
1673 *local = value;
1674 Ok(())
1675 }
1676 }
1677 }
1678
1679 pub fn storage_live(&mut self, local: mir::Local) -> Option<Value> {
1680 trace!("{:?} is now live", local);
1681
1682 // StorageLive *always* kills the value that's currently stored
1683 mem::replace(&mut self.locals[local], Some(Value::Scalar(Scalar::undef())))
1684 }
1685
1686 /// Returns the old value of the local
1687 pub fn storage_dead(&mut self, local: mir::Local) -> Option<Value> {
1688 trace!("{:?} is now dead", local);
1689
1690 self.locals[local].take()
1691 }
1692 }