use std::borrow::Cow;

use rustc::{mir, ty};
use rustc::ty::Instance;
use rustc::ty::layout::{self, TyLayout, LayoutOf};
use syntax::source_map::Span;
use rustc_target::spec::abi::Abi;
use super::{
    InterpResult, PointerArithmetic,
    InterpCx, Machine, OpTy, ImmTy, PlaceTy, MPlaceTy, StackPopCleanup, FnVal,
};
impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
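    /// Jump to the given basic block, or raise "unreachable" UB when there is
    /// no block to continue to.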
    pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> {
        if let Some(target) = target {
            self.frame_mut().block = target;
            self.frame_mut().stmt = 0;
            Ok(())
        } else {
            throw_ub!(Unreachable)
        }
    }
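
    /// Evaluate a single MIR terminator: pop the frame on `Return`, branch on
    /// `SwitchInt`, and dispatch `Call`, `Drop`, and `Assert` to their handlers.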
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> InterpResult<'tcx> {
        use rustc::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.frame().return_place.map(|r| self.dump_place(*r));
                self.pop_stack_frame()?
            }

            Goto { target } => self.goto_block(Some(target))?,

            SwitchInt { ref discr, ref values, ref targets, .. } => {
                let discr = self.read_immediate(self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

                // Branch to the `otherwise` case by default, if no match is found.
                let mut target_block = targets[targets.len() - 1];

                for (index, &const_int) in values.iter().enumerate() {
                    // Compare using binary_op, to also support pointer values
                    let res = self.overflowing_binary_op(mir::BinOp::Eq,
                        discr,
                        ImmTy::from_uint(const_int, discr.layout),
                    )?.0;
                    if res.to_bool()? {
                        target_block = targets[index];
                        break;
                    }
                }

                self.goto_block(Some(target_block))?;
            }
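
            // Calls: evaluate the callee and its arguments, then hand off to
            // `eval_fn_call` below.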
            Call { ref func, ref args, ref destination, .. } => {
                let (dest, ret) = match *destination {
                    Some((ref lv, target)) => (Some(self.eval_place(lv)?), Some(target)),
                    None => (None, None),
                };

                let func = self.eval_operand(func, None)?;
                let (fn_val, abi) = match func.layout.ty.kind {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_scalar(func)?.not_undef()?;
                        let fn_val = self.memory.get_fn(fn_ptr)?;
                        (fn_val, caller_abi)
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (FnVal::Instance(self.resolve(def_id, substs)?), sig.abi())
                    },
                    _ => bug!("invalid callee of type {:?}", func.layout.ty),
                };
                let args = self.eval_operands(args)?;
                self.eval_fn_call(
                    fn_val,
                    terminator.source_info.span,
                    abi,
                    &args[..],
                    dest,
                    ret,
                )?;
            }
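
            // `Drop` resolves the type's `drop_in_place` instance and invokes it
            // like an ordinary call.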
            Drop { ref location, target, .. } => {
                // FIXME(CTFE): forbid drop in const eval
                let place = self.eval_place(location)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", location, ty);

                let instance = Instance::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(
                    place,
                    instance,
                    terminator.source_info.span,
                    target,
                )?;
            }
            Assert { ref cond, expected, ref msg, target, .. } => {
                let cond_val = self.read_immediate(self.eval_operand(cond, None)?)?
                    .to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.goto_block(Some(target))?;
                } else {
                    // Compute error message
                    use rustc::mir::interpret::PanicInfo::*;
                    return Err(match msg {
                        BoundsCheck { ref len, ref index } => {
                            let len = self
                                .read_immediate(self.eval_operand(len, None)?)
                                .expect("can't eval len")
                                .to_scalar()?
                                .to_bits(self.memory.pointer_size())? as u64;
                            let index = self
                                .read_immediate(self.eval_operand(index, None)?)
                                .expect("can't eval index")
                                .to_scalar()?
                                .to_bits(self.memory.pointer_size())? as u64;
                            err_panic!(BoundsCheck { len, index })
                        }
                        Overflow(op) => err_panic!(Overflow(*op)),
                        OverflowNeg => err_panic!(OverflowNeg),
                        DivisionByZero => err_panic!(DivisionByZero),
                        RemainderByZero => err_panic!(RemainderByZero),
                        GeneratorResumedAfterReturn => err_panic!(GeneratorResumedAfterReturn),
                        GeneratorResumedAfterPanic => err_panic!(GeneratorResumedAfterPanic),
                        Panic { .. } => bug!("`Panic` variant cannot occur in MIR"),
                    }
                    .into());
                }
            }
            Yield { .. } |
            GeneratorDrop |
            DropAndReplace { .. } |
            Resume |
            Abort => unimplemented!("{:#?}", terminator.kind),

            FalseEdges { .. } => bug!("should have been eliminated by \
                                       `simplify_branches` mir pass"),
            FalseUnwind { .. } => bug!("should have been eliminated by \
                                        `simplify_branches` mir pass"),

            Unreachable => throw_ub!(Unreachable),
        }

        Ok(())
    }
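
    /// Check whether an argument with layout `caller` may be passed where the
    /// callee expects layout `callee` (for Rust-ABI calls, layout-compatible
    /// transmutes are allowed).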
    fn check_argument_compat(
        rust_abi: bool,
        caller: TyLayout<'tcx>,
        callee: TyLayout<'tcx>,
    ) -> bool {
        if caller.ty == callee.ty {
            // No question
            return true;
        }
        if !rust_abi {
            // Don't risk anything
            return false;
        }
        // Compare layout
        match (&caller.abi, &callee.abi) {
            // Different valid ranges are okay (once we enforce validity,
            // that will take care to make it UB to leave the range, just
            // like for transmute).
            (layout::Abi::Scalar(ref caller), layout::Abi::Scalar(ref callee)) =>
                caller.value == callee.value,
            (layout::Abi::ScalarPair(ref caller1, ref caller2),
             layout::Abi::ScalarPair(ref callee1, ref callee2)) =>
                caller1.value == callee1.value && caller2.value == callee2.value,
            // Be conservative
            _ => false,
        }
    }
    /// Pass a single argument, checking the types for compatibility.
    fn pass_argument(
        &mut self,
        rust_abi: bool,
        caller_arg: &mut impl Iterator<Item = OpTy<'tcx, M::PointerTag>>,
        callee_arg: PlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx> {
        if rust_abi && callee_arg.layout.is_zst() {
            // Nothing to do.
            trace!("Skipping callee ZST");
            return Ok(());
        }
        let caller_arg = caller_arg.next()
            .ok_or_else(|| err_unsup!(FunctionArgCountMismatch))?;
        if rust_abi {
            debug_assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out");
        }
        // Now, check
        if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) {
            throw_unsup!(FunctionArgMismatch(caller_arg.layout.ty, callee_arg.layout.ty))
        }
        // We allow some transmutes here
        self.copy_op_transmute(caller_arg, callee_arg)
    }
    /// Call this function -- pushing the stack frame and initializing the arguments.
    fn eval_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        span: Span,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        dest: Option<PlaceTy<'tcx, M::PointerTag>>,
        ret: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("eval_fn_call: {:#?}", fn_val);

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(self, extra, args, dest, ret);
            }
        };

        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                // The intrinsic itself cannot diverge, so if we got here without a return
                // place... (can happen e.g., for transmute returning `!`)
                let dest = match dest {
                    Some(dest) => dest,
                    None => throw_ub!(Unreachable)
                };
                M::call_intrinsic(self, span, instance, args, dest)?;
                // No stack frame gets pushed, the main loop will just act as if the
                // call just completed and it's returning to the current frame.
                self.goto_block(ret)?;
                self.dump_place(*dest);
                Ok(())
            }
            ty::InstanceDef::VtableShim(..) |
            ty::InstanceDef::ReifyShim(..) |
            ty::InstanceDef::ClosureOnceShim { .. } |
            ty::InstanceDef::FnPtrShim(..) |
            ty::InstanceDef::DropGlue(..) |
            ty::InstanceDef::CloneShim(..) |
            ty::InstanceDef::Item(_) => {
                // ABI check
                {
                    let callee_abi = {
                        let instance_ty = instance.ty(*self.tcx);
                        match instance_ty.kind {
                            ty::FnDef(..) =>
                                instance_ty.fn_sig(*self.tcx).abi(),
                            ty::Closure(..) => Abi::RustCall,
                            ty::Generator(..) => Abi::Rust,
                            _ => bug!("unexpected callee ty: {:?}", instance_ty),
                        }
                    };
                    let normalize_abi = |abi| match abi {
                        Abi::Rust | Abi::RustCall | Abi::RustIntrinsic | Abi::PlatformIntrinsic =>
                            // These are all the same ABI, really.
                            Abi::Rust,
                        abi => abi,
                    };
                    if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                        throw_unsup!(FunctionAbiMismatch(caller_abi, callee_abi))
                    }
                }
                // We need MIR for this fn
                let body = match M::find_fn(self, instance, args, dest, ret)? {
                    Some(body) => body,
                    None => return Ok(()),
                };

                self.push_stack_frame(
                    instance,
                    span,
                    body,
                    dest,
                    StackPopCleanup::Goto(ret),
                )?;

                // We want to pop this frame again in case there was an error, to put
                // the blame in the right location. Until the 2018 edition is used in
                // the compiler, we have to do this with an immediately invoked function.
                let res = (|| {
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>(),
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        body.spread_arg,
                        body.args_iter()
                            .map(|local|
                                (local, self.layout_of_local(self.frame(), local, None).unwrap().ty)
                            )
                            .collect::<Vec<_>>(),
                    );
                    // Figure out how to pass which arguments.
                    // The Rust ABI is special: ZSTs get skipped.
                    let rust_abi = match caller_abi {
                        Abi::Rust | Abi::RustCall => true,
                        _ => false,
                    };
                    // We have two iterators: Where the arguments come from,
                    // and where they go to.

                    // For where they come from: If the ABI is RustCall, we untuple the
                    // last incoming argument. These two iterators do not have the same type,
                    // so to keep the code paths uniform we accept an allocation
                    // (for RustCall ABI only).
                    let caller_args: Cow<'_, [OpTy<'tcx, M::PointerTag>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple
                            let (&untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(args.iter().map(|&a| Ok(a))
                                .chain((0..untuple_arg.layout.fields.count()).into_iter()
                                    .map(|i| self.operand_field(untuple_arg, i as u64))
                                )
                                .collect::<InterpResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>()?)
                        } else {
                            // Plain arg passing
                            Cow::from(args)
                        };
                    // Skip ZSTs
                    let mut caller_iter = caller_args.iter()
                        .filter(|op| !rust_abi || !op.layout.is_zst())
                        .map(|op| *op);

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`. If we could write
                    // this as a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_iter` for ZSTs.
                    let mut locals_iter = body.args_iter();
                    while let Some(local) = locals_iter.next() {
                        let dest = self.eval_place(
                            &mir::Place::from(local)
                        )?;
                        if Some(local) == body.spread_arg {
                            // Must be a tuple
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(dest, i as u64)?;
                                self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                            }
                        } else {
                            // Normal argument
                            self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                        }
                    }
                    // Now we should have no more caller args
                    if caller_iter.next().is_some() {
                        trace!("Caller has passed too many args");
                        throw_unsup!(FunctionArgCountMismatch)
                    }
                    // Don't forget to check the return type!
                    if let Some(caller_ret) = dest {
                        let callee_ret = self.eval_place(
                            &mir::Place::return_place()
                        )?;
                        if !Self::check_argument_compat(
                            rust_abi,
                            caller_ret.layout,
                            callee_ret.layout,
                        ) {
                            throw_unsup!(
                                FunctionRetMismatch(caller_ret.layout.ty, callee_ret.layout.ty)
                            )
                        }
                    } else {
                        let local = mir::RETURN_PLACE;
                        let callee_layout = self.layout_of_local(self.frame(), local, None)?;
                        if !callee_layout.abi.is_uninhabited() {
                            throw_unsup!(FunctionRetMismatch(
                                self.tcx.types.never, callee_layout.ty
                            ))
                        }
                    }
                    Ok(())
                })();
                match res {
                    Err(err) => {
                        self.stack.pop();
                        Err(err)
                    }
                    Ok(v) => Ok(v),
                }
            }
            // cannot use the shim here, because that will only result in infinite recursion
            ty::InstanceDef::Virtual(_, idx) => {
                let mut args = args.to_vec();
                let ptr_size = self.pointer_size();
                // We have to implement all "object safe receivers". Currently we
                // support built-in pointers (&, &mut, Box) as well as unsized-self. We do
                // not yet support custom self types.
                // Also see librustc_codegen_llvm/abi.rs and librustc_codegen_llvm/mir/block.rs.
                let receiver_place = match args[0].layout.ty.builtin_deref(true) {
                    Some(_) => {
                        // Built-in pointer.
                        self.deref_operand(args[0])?
                    }
                    None => {
                        // Unsized self.
                        args[0].assert_mem_place()
                    }
                };
                // Find and consult vtable
                let vtable = receiver_place.vtable();
                let vtable_slot = vtable.ptr_offset(ptr_size * (idx as u64 + 3), self)?;
                let vtable_slot = self.memory.check_ptr_access(
                    vtable_slot,
                    ptr_size,
                    self.tcx.data_layout.pointer_align.abi,
                )?.expect("cannot be a ZST");
                let fn_ptr = self.memory.get(vtable_slot.alloc_id)?
                    .read_ptr_sized(self, vtable_slot)?.not_undef()?;
                let drop_fn = self.memory.get_fn(fn_ptr)?;

                // `*mut receiver_place.layout.ty` is almost the layout that we
                // want for args[0]: We have to project to field 0 because we want
                // a thin pointer.
                assert!(receiver_place.layout.is_unsized());
                let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
                let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0)?;
                // Adjust receiver argument.
                args[0] = OpTy::from(ImmTy {
                    layout: this_receiver_ptr,
                    imm: receiver_place.ptr.into()
                });
                trace!("Patched self operand to {:#?}", args[0]);
                // recurse with concrete function
                self.eval_fn_call(drop_fn, span, caller_abi, &args, dest, ret)
            }
        }
    }
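
    /// Drop `place` in place: resolve trait objects to their concrete drop glue
    /// and call it like a regular function, continuing at `target`.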
    fn drop_in_place(
        &mut self,
        place: PlaceTy<'tcx, M::PointerTag>,
        instance: ty::Instance<'tcx>,
        span: Span,
        target: mir::BasicBlock,
    ) -> InterpResult<'tcx> {
        trace!("drop_in_place: {:?},\n {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object. This may well be unaligned, which is fine
        // for us here. However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.kind {
            ty::Dynamic(..) => {
                // Dropping a trait object.
                self.unpack_dyn_trait(place)?
            }
            _ => (instance, place),
        };

        let arg = ImmTy {
            imm: place.to_ref(),
            layout: self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        };

        let ty = self.tcx.mk_unit(); // return type is ()
        let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);

        self.eval_fn_call(
            FnVal::Instance(instance),
            span,
            Abi::Rust,
            &[arg.into()],
            Some(dest.into()),
            Some(target),
        )
    }
}