use std::borrow::Cow;
use std::convert::TryFrom;

use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::ty::layout::{self, TyAndLayout};
use rustc_middle::ty::Instance;
use rustc_middle::{
    mir,
    ty::{self, Ty},
};
use rustc_target::abi::{self, LayoutOf as _};
use rustc_target::spec::abi::Abi;

use super::{
    FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, StackPopCleanup,
    StackPopUnwind,
};

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
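    /// Whether a function with the given codegen attributes and ABI may unwind,
    /// under the current session's panic strategy.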
    fn fn_can_unwind(&self, attrs: CodegenFnAttrFlags, abi: Abi) -> bool {
        layout::fn_can_unwind(
            self.tcx.sess.panic_strategy(),
            attrs,
            layout::conv_from_spec_abi(*self.tcx, abi),
            abi,
        )
    }
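
    /// Evaluate a single MIR terminator, advancing the interpreter's control flow.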
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.pop_stack_frame(/* unwinding */ false)?
            }

            Goto { target } => self.go_to_block(target),
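
            // `SwitchInt` evaluates the discriminant and branches to the first
            // matching target, defaulting to `otherwise`.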
            SwitchInt { ref discr, ref targets, switch_ty } => {
                let discr = self.read_immediate(&self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);
                assert_eq!(discr.layout.ty, switch_ty);

                // Branch to the `otherwise` case by default, if no match is found.
                assert!(!targets.iter().is_empty());
                let mut target_block = targets.otherwise();

                for (const_int, target) in targets.iter() {
                    // Compare using binary_op, to also support pointer values
                    let res = self
                        .overflowing_binary_op(
                            mir::BinOp::Eq,
                            &discr,
                            &ImmTy::from_uint(const_int, discr.layout),
                        )?
                        .0;
                    if res.to_bool()? {
                        target_block = target;
                        break;
                    }
                }

                self.go_to_block(target_block);
            }
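
            // Function calls: resolve the callee, evaluate the arguments, and
            // hand control to `eval_fn_call`.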
            Call { ref func, ref args, destination, ref cleanup, from_hir_call: _, fn_span: _ } => {
                let old_stack = self.frame_idx();
                let old_loc = self.frame().loc;
                let func = self.eval_operand(func, None)?;
                let (fn_val, abi, caller_can_unwind) = match *func.layout.ty.kind() {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_scalar(&func)?.check_init()?;
                        let fn_val = self.memory.get_fn(fn_ptr)?;
                        (
                            fn_val,
                            caller_abi,
                            self.fn_can_unwind(layout::fn_ptr_codegen_fn_attr_flags(), caller_abi),
                        )
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (
                            FnVal::Instance(
                                self.resolve(ty::WithOptConstParam::unknown(def_id), substs)?,
                            ),
                            sig.abi(),
                            self.fn_can_unwind(self.tcx.codegen_fn_attrs(def_id).flags, sig.abi()),
                        )
                    }
                    _ => span_bug!(
                        terminator.source_info.span,
                        "invalid callee of type {:?}",
                        func.layout.ty
                    ),
                };
                let args = self.eval_operands(args)?;
                let dest_place;
                let ret = match destination {
                    Some((dest, ret)) => {
                        dest_place = self.eval_place(dest)?;
                        Some((&dest_place, ret))
                    }
                    None => None,
                };
                self.eval_fn_call(
                    fn_val,
                    abi,
                    &args[..],
                    ret,
                    match (cleanup, caller_can_unwind) {
                        (Some(cleanup), true) => StackPopUnwind::Cleanup(*cleanup),
                        (None, true) => StackPopUnwind::Skip,
                        (_, false) => StackPopUnwind::NotAllowed,
                    },
                )?;
                // Sanity-check that `eval_fn_call` either pushed a new frame or
                // did a jump to another block.
                if self.frame_idx() == old_stack && self.frame().loc == old_loc {
                    span_bug!(terminator.source_info.span, "evaluating this call made no progress");
                }
            }
            Drop { place, target, unwind } => {
                let place = self.eval_place(place)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", place, ty);

                let instance = Instance::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(&place, instance, target, unwind)?;
            }

            Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond_val =
                    self.read_immediate(&self.eval_operand(cond, None)?)?.to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.go_to_block(target);
                } else {
                    M::assert_panic(self, msg, cleanup)?;
                }
            }

            Abort => {
                M::abort(self, "the program aborted execution".to_owned())?;
            }

            // When we encounter Resume, we've finished unwinding
            // cleanup for the current stack frame. We pop it in order
            // to continue unwinding the next frame.
            Resume => {
                trace!("unwinding: resuming from cleanup");
                // By definition, a Resume terminator means
                // that we're unwinding.
                self.pop_stack_frame(/* unwinding */ true)?;
                return Ok(());
            }

            // It is UB to ever encounter this.
            Unreachable => throw_ub!(Unreachable),

            // These should never occur for MIR we actually run.
            DropAndReplace { .. }
            | FalseEdge { .. }
            | FalseUnwind { .. }
            | Yield { .. }
            | GeneratorDrop => span_bug!(
                terminator.source_info.span,
                "{:#?} should have been eliminated by MIR pass",
                terminator.kind
            ),

            // Inline assembly can't be interpreted.
            InlineAsm { .. } => throw_unsup_format!("inline assembly is not supported"),
        }

        Ok(())
    }
    fn check_argument_compat(
        rust_abi: bool,
        caller: TyAndLayout<'tcx>,
        callee: TyAndLayout<'tcx>,
    ) -> bool {
        if caller.ty == callee.ty {
            // No question
            return true;
        }
        if !rust_abi {
            // Don't risk anything
            return false;
        }
        // Compare layout
        match (&caller.abi, &callee.abi) {
            // Different valid ranges are okay (once we enforce validity,
            // that will take care to make it UB to leave the range, just
            // like for transmute).
            (abi::Abi::Scalar(ref caller), abi::Abi::Scalar(ref callee)) => {
                caller.value == callee.value
            }
            (
                abi::Abi::ScalarPair(ref caller1, ref caller2),
                abi::Abi::ScalarPair(ref callee1, ref callee2),
            ) => caller1.value == callee1.value && caller2.value == callee2.value,
            // Be conservative
            _ => false,
        }
    }

    /// Pass a single argument, checking the types for compatibility.
    fn pass_argument(
        &mut self,
        rust_abi: bool,
        caller_arg: &mut impl Iterator<Item = OpTy<'tcx, M::PointerTag>>,
        callee_arg: &PlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx> {
        if rust_abi && callee_arg.layout.is_zst() {
            // Nothing to do.
            trace!("Skipping callee ZST");
            return Ok(());
        }
        let caller_arg = caller_arg.next().ok_or_else(|| {
            err_ub_format!("calling a function with fewer arguments than it requires")
        })?;
        if rust_abi {
            assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out");
        }
        // Now, check
        if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) {
            throw_ub_format!(
                "calling a function with argument of type {:?} passing data of type {:?}",
                callee_arg.layout.ty,
                caller_arg.layout.ty
            )
        }
        // We allow some transmutes here
        self.copy_op_transmute(&caller_arg, callee_arg)
    }

    /// Call this function -- pushing the stack frame and initializing the arguments.
    pub(crate) fn eval_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        ret: Option<(&PlaceTy<'tcx, M::PointerTag>, mir::BasicBlock)>,
        mut unwind: StackPopUnwind,
    ) -> InterpResult<'tcx> {
        trace!("eval_fn_call: {:#?}", fn_val);

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(self, extra, caller_abi, args, ret, unwind);
            }
        };
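
        // Helper to determine the ABI that a callee of the given type is called with.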
        let get_abi = |this: &Self, instance_ty: Ty<'tcx>| match instance_ty.kind() {
            ty::FnDef(..) => instance_ty.fn_sig(*this.tcx).abi(),
            ty::Closure(..) => Abi::RustCall,
            ty::Generator(..) => Abi::Rust,
            _ => span_bug!(this.cur_span(), "unexpected callee ty: {:?}", instance_ty),
        };
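
        // Helper to check that caller and callee agree on the ABI (Rust-internal
        // ABIs are treated as interchangeable).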
        let check_abi = |callee_abi: Abi| -> InterpResult<'tcx> {
            let normalize_abi = |abi| match abi {
                Abi::Rust | Abi::RustCall | Abi::RustIntrinsic | Abi::PlatformIntrinsic =>
                // These are all the same ABI, really.
                {
                    Abi::Rust
                }
                abi => abi,
            };
            if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                throw_ub_format!(
                    "calling a function with ABI {} using caller ABI {}",
                    callee_abi.name(),
                    caller_abi.name()
                )
            }
            Ok(())
        };

        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                if M::enforce_abi(self) {
                    check_abi(get_abi(self, instance.ty(*self.tcx, self.param_env)))?;
                }
                assert!(caller_abi == Abi::RustIntrinsic || caller_abi == Abi::PlatformIntrinsic);
                M::call_intrinsic(self, instance, args, ret, unwind)
            }
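
            // All of these calls are executed by looking up the callee's MIR body
            // (or a `Machine`-provided replacement for it).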
            ty::InstanceDef::VtableShim(..)
            | ty::InstanceDef::ReifyShim(..)
            | ty::InstanceDef::ClosureOnceShim { .. }
            | ty::InstanceDef::FnPtrShim(..)
            | ty::InstanceDef::DropGlue(..)
            | ty::InstanceDef::CloneShim(..)
            | ty::InstanceDef::Item(_) => {
                // We need MIR for this fn
                let body =
                    match M::find_mir_or_eval_fn(self, instance, caller_abi, args, ret, unwind)? {
                        Some(body) => body,
                        None => return Ok(()),
                    };

                // Check against the ABI of the MIR body we are calling (not the ABI of `instance`;
                // these can differ when `find_mir_or_eval_fn` does something clever like resolve
                // exported symbol names).
                let callee_def_id = body.source.def_id();
                let callee_abi = get_abi(self, self.tcx.type_of(callee_def_id));

                if M::enforce_abi(self) {
                    check_abi(callee_abi)?;
                }

                if !matches!(unwind, StackPopUnwind::NotAllowed)
                    && !self
                        .fn_can_unwind(self.tcx.codegen_fn_attrs(callee_def_id).flags, callee_abi)
                {
                    // The callee cannot unwind.
                    unwind = StackPopUnwind::NotAllowed;
                }

                self.push_stack_frame(
                    instance,
                    body,
                    ret.map(|p| p.0),
                    StackPopCleanup::Goto { ret: ret.map(|p| p.1), unwind },
                )?;

                // If an error is raised here, pop the frame again to get an accurate backtrace.
                // To this end, we wrap it all in a `try` block.
                let res: InterpResult<'tcx> = try {
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>()
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        body.spread_arg,
                        body.args_iter()
                            .map(|local| (
                                local,
                                self.layout_of_local(self.frame(), local, None).unwrap().ty
                            ))
                            .collect::<Vec<_>>()
                    );

                    // Figure out how to pass which arguments.
                    // The Rust ABI is special: ZSTs get skipped.
                    let rust_abi = match caller_abi {
                        Abi::Rust | Abi::RustCall => true,
                        _ => false,
                    };

                    // We have two iterators: where the arguments come from,
                    // and where they go to.

                    // For where they come from: if the ABI is RustCall, we untuple the
                    // last incoming argument. These two iterators do not have the same type,
                    // so to keep the code paths uniform we accept an allocation
                    // (for RustCall ABI only).
                    let caller_args: Cow<'_, [OpTy<'tcx, M::PointerTag>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple
                            let (untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(
                                args.iter()
                                    .map(|&a| Ok(a))
                                    .chain(
                                        (0..untuple_arg.layout.fields.count())
                                            .map(|i| self.operand_field(untuple_arg, i)),
                                    )
                                    .collect::<InterpResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>()?,
                            )
                        } else {
                            // Plain arg passing
                            Cow::from(args)
                        };
                    // Skip ZSTs
                    let mut caller_iter =
                        caller_args.iter().filter(|op| !rust_abi || !op.layout.is_zst()).copied();
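
                    // E.g. for a `RustCall` invocation `f(a, (b, c))`, `caller_iter`
                    // now yields `a`, `b`, `c` (with any ZSTs already filtered out).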

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`. If we could write
                    // this as a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_iter` for ZSTs.
                    for local in body.args_iter() {
                        let dest = self.eval_place(mir::Place::from(local))?;
                        if Some(local) == body.spread_arg {
                            // Must be a tuple
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(&dest, i)?;
                                self.pass_argument(rust_abi, &mut caller_iter, &dest)?;
                            }
                        } else {
                            // Normal argument
                            self.pass_argument(rust_abi, &mut caller_iter, &dest)?;
                        }
                    }
                    // Now we should have no more caller args.
                    if caller_iter.next().is_some() {
                        throw_ub_format!("calling a function with more arguments than it expected")
                    }
                    // Don't forget to check the return type!
                    if let Some((caller_ret, _)) = ret {
                        let callee_ret = self.eval_place(mir::Place::return_place())?;
                        if !Self::check_argument_compat(
                            rust_abi,
                            caller_ret.layout,
                            callee_ret.layout,
                        ) {
                            throw_ub_format!(
                                "calling a function with return type {:?} passing \
                                 return place of type {:?}",
                                callee_ret.layout.ty,
                                caller_ret.layout.ty
                            )
                        }
                    } else {
                        let local = mir::RETURN_PLACE;
                        let callee_layout = self.layout_of_local(self.frame(), local, None)?;
                        if !callee_layout.abi.is_uninhabited() {
                            throw_ub_format!("calling a returning function without a return place")
                        }
                    }
                };
                match res {
                    Err(err) => {
                        self.stack_mut().pop();
                        Err(err)
                    }
                    Ok(()) => Ok(()),
                }
            }

            // Cannot use the shim here, because that will only result in infinite recursion.
            ty::InstanceDef::Virtual(_, idx) => {
                let mut args = args.to_vec();
                // We have to implement all "object safe receivers". Currently we
                // support built-in pointers (`&`, `&mut`, `Box`) as well as unsized-self. We do
                // not yet support custom self types.
                // Also see `compiler/rustc_codegen_llvm/src/abi.rs` and `compiler/rustc_codegen_ssa/src/mir/block.rs`.
                let receiver_place = match args[0].layout.ty.builtin_deref(true) {
                    Some(_) => {
                        // Built-in pointer.
                        self.deref_operand(&args[0])?
                    }
                    None => {
                        // Unsized self.
                        args[0].assert_mem_place(self)
                    }
                };
                // Find and consult vtable.
                let vtable = receiver_place.vtable();
                let drop_fn = self.get_vtable_slot(vtable, u64::try_from(idx).unwrap())?;

                // `*mut receiver_place.layout.ty` is almost the layout that we
                // want for args[0]: We have to project to field 0 because we want
                // a thin pointer.
                assert!(receiver_place.layout.is_unsized());
                let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
                let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0)?;
                // Adjust receiver argument.
                args[0] =
                    OpTy::from(ImmTy::from_immediate(receiver_place.ptr.into(), this_receiver_ptr));
                trace!("Patched self operand to {:#?}", args[0]);
                // Recurse with the concrete function.
                self.eval_fn_call(drop_fn, caller_abi, &args, ret, unwind)
            }
        }
    }
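
    /// Evaluates the drop glue `instance` on `place`; when the glue returns,
    /// execution continues at `target` (or at `unwind` if dropping panics).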
    fn drop_in_place(
        &mut self,
        place: &PlaceTy<'tcx, M::PointerTag>,
        instance: ty::Instance<'tcx>,
        target: mir::BasicBlock,
        unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("drop_in_place: {:?},\n {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object. This may well be unaligned, which is fine
        // for us here. However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.kind() {
            ty::Dynamic(..) => {
                // Dropping a trait object.
                self.unpack_dyn_trait(&place)?
            }
            _ => (instance, place),
        };

        let arg = ImmTy::from_immediate(
            place.to_ref(),
            self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        );

        let ty = self.tcx.mk_unit(); // return type is ()
        let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);

        self.eval_fn_call(
            FnVal::Instance(instance),
            Abi::Rust,
            &[arg.into()],
            Some((&dest.into(), target)),
            match unwind {
                Some(cleanup) => StackPopUnwind::Cleanup(cleanup),
                None => StackPopUnwind::Skip,
            },
        )
    }
}