2 use std
::convert
::TryFrom
;
4 use rustc_middle
::ty
::layout
::{FnAbiOf, LayoutOf}
;
5 use rustc_middle
::ty
::Instance
;
10 use rustc_target
::abi
;
11 use rustc_target
::abi
::call
::{ArgAbi, ArgAttribute, ArgAttributes, FnAbi, PassMode}
;
12 use rustc_target
::spec
::abi
::Abi
;
15 FnVal
, ImmTy
, InterpCx
, InterpResult
, MPlaceTy
, Machine
, OpTy
, PlaceTy
, Scalar
,
16 StackPopCleanup
, StackPopUnwind
,
19 impl<'mir
, 'tcx
: 'mir
, M
: Machine
<'mir
, 'tcx
>> InterpCx
<'mir
, 'tcx
, M
> {
// NOTE(review): this block is a garbled extraction — each original line is split across
// several physical lines and prefixed with its original line number; those numbers jump
// (25→27, 57→69, 97→102, ...), so interior lines (including some braces, `let`
// bindings, and match-arm headers) are MISSING from this view. Comments below describe
// only what the visible fragments show; do not assume the fragments are complete.
//
// Evaluates one MIR terminator: dispatches on `terminator.kind` and either jumps to a
// basic block, performs a call/drop/assert, or pops the current stack frame.
20 pub(super) fn eval_terminator(
22 terminator
: &mir
::Terminator
<'tcx
>,
23 ) -> InterpResult
<'tcx
> {
24 use rustc_middle
::mir
::TerminatorKind
::*;
25 match terminator
.kind
{
// Pops the current frame without unwinding (the arm header for this line, presumably
// `Return`, is not visible in this extraction — TODO confirm).
27 self.pop_stack_frame(/* unwinding */ false)?
// Goto: unconditional jump to the target block.
30 Goto { target }
=> self.go_to_block(target
),
// SwitchInt: read the discriminant as an immediate, sanity-check its type against
// `switch_ty`, then scan the (value, target) pairs; `targets.otherwise()` is the
// fallback when no value matches.
32 SwitchInt { ref discr, ref targets, switch_ty }
=> {
33 let discr
= self.read_immediate(&self.eval_operand(discr
, None
)?
)?
;
34 trace
!("SwitchInt({:?})", *discr
);
35 assert_eq
!(discr
.layout
.ty
, switch_ty
);
37 // Branch to the `otherwise` case by default, if no match is found.
38 assert
!(!targets
.iter().is_empty());
39 let mut target_block
= targets
.otherwise();
41 for (const_int
, target
) in targets
.iter() {
42 // Compare using MIR BinOp::Eq, to also support pointer values.
43 // (Avoiding `self.binary_op` as that does some redundant layout computation.)
// NOTE(review): the receiver expression and the first arguments of this
// `overflowing_binary_op` call (original lines 44, 46–47) are missing here.
45 .overflowing_binary_op(
48 &ImmTy
::from_uint(const_int
, discr
.layout
),
// If the comparison (lines missing) succeeded, remember this target.
52 target_block
= target
;
57 self.go_to_block(target_block
);
// Call handling (the `Call { .. }` arm header, original lines ~58–68, is not visible
// here — TODO confirm). Record the current frame index and location so we can later
// assert that evaluating the call made progress.
69 let old_stack
= self.frame_idx();
70 let old_loc
= self.frame().loc
;
71 let func
= self.eval_operand(func
, None
)?
;
72 let args
= self.eval_operands(args
)?
;
74 let fn_sig_binder
= func
.layout
.ty
.fn_sig(*self.tcx
);
// Normalize the (late-bound) signature; the `let fn_sig =` binding line appears to be
// missing — `fn_sig` is used below at original line 77.
76 self.tcx
.normalize_erasing_late_bound_regions(self.param_env
, fn_sig_binder
);
// `extra_args`: operands beyond the declared inputs of the signature.
77 let extra_args
= &args
[fn_sig
.inputs().len()..];
78 let extra_args
= self.tcx
.mk_type_list(extra_args
.iter().map(|arg
| arg
.layout
.ty
));
// Resolve the callee into (FnVal, FnAbi, needs-caller-location) depending on whether
// the callee type is a fn pointer or a FnDef.
80 let (fn_val
, fn_abi
, with_caller_location
) = match *func
.layout
.ty
.kind() {
// Fn-pointer case (the `ty::FnPtr(_) => {` arm header is not visible here).
82 let fn_ptr
= self.read_pointer(&func
)?
;
83 let fn_val
= self.get_ptr_fn(fn_ptr
)?
;
84 (fn_val
, self.fn_abi_of_fn_ptr(fn_sig_binder
, extra_args
)?
, false)
86 ty
::FnDef(def_id
, substs
) => {
// Resolve the concrete instance for this DefId + substs.
88 self.resolve(ty
::WithOptConstParam
::unknown(def_id
), substs
)?
;
90 FnVal
::Instance(instance
),
91 self.fn_abi_of_instance(instance
, extra_args
)?
,
92 instance
.def
.requires_caller_location(*self.tcx
),
// Fallthrough arm (header missing): bug out on a non-callable callee type.
96 terminator
.source_info
.span
,
97 "invalid callee of type {:?}",
102 let destination
= self.eval_place(destination
)?
;
105 (fn_sig
.abi
, fn_abi
),
107 with_caller_location
,
// Decide how this call may unwind, from the cleanup block and the fn ABI.
110 match (cleanup
, fn_abi
.can_unwind
) {
111 (Some(cleanup
), true) => StackPopUnwind
::Cleanup(*cleanup
),
112 (None
, true) => StackPopUnwind
::Skip
,
113 (_
, false) => StackPopUnwind
::NotAllowed
,
116 // Sanity-check that `eval_fn_call` either pushed a new frame or
117 // did a jump to another block.
118 if self.frame_idx() == old_stack
&& self.frame().loc
== old_loc
{
119 span_bug
!(terminator
.source_info
.span
, "evaluating this call made no progress");
// Drop: resolve the `drop_in_place` instance for the place's type and run it.
123 Drop { place, target, unwind }
=> {
124 let place
= self.eval_place(place
)?
;
125 let ty
= place
.layout
.ty
;
126 trace
!("TerminatorKind::drop: {:?}, type {}", place
, ty
);
128 let instance
= Instance
::resolve_drop_in_place(*self.tcx
, ty
);
129 self.drop_in_place(&place
, instance
, target
, unwind
)?
;
// Assert: read the condition as a bool; on the expected value continue to `target`,
// otherwise let the Machine raise the panic. NOTE(review): the `let cond_val =`
// binding line (original ~133) is missing; `cond_val` is used below.
132 Assert { ref cond, expected, ref msg, target, cleanup }
=> {
134 self.read_immediate(&self.eval_operand(cond
, None
)?
)?
.to_scalar()?
.to_bool()?
;
135 if expected
== cond_val
{
136 self.go_to_block(target
);
138 M
::assert_panic(self, msg
, cleanup
)?
;
// Abort-style terminator (arm header not visible — presumably `Abort`): delegate to
// the Machine hook.
143 M
::abort(self, "the program aborted execution".to_owned())?
;
146 // When we encounter Resume, we've finished unwinding
147 // cleanup for the current stack frame. We pop it in order
148 // to continue unwinding the next frame
150 trace
!("unwinding: resuming from cleanup");
151 // By definition, a Resume terminator means
152 // that we're unwinding
153 self.pop_stack_frame(/* unwinding */ true)?
;
157 // It is UB to ever encounter this.
158 Unreachable
=> throw_ub
!(Unreachable
),
160 // These should never occur for MIR we actually run.
161 DropAndReplace { .. }
165 | GeneratorDrop
=> span_bug
!(
166 terminator
.source_info
.span
,
167 "{:#?} should have been eliminated by MIR pass",
171 // Inline assembly can't be interpreted.
172 InlineAsm { .. }
=> throw_unsup_format
!("inline assembly is not supported"),
// NOTE(review): garbled extraction with missing interior lines (embedded numbers jump:
// 180→182, 184→188, 192→195, 201→204, ...) — the `return` statements, some braces,
// and the function's return-type line are not visible. Comments describe the visible
// fragments only.
//
// Heuristic compatibility check between a caller-side and a callee-side `ArgAbi`,
// built from three closures combined at the end: layout, padding, and pass-mode.
178 fn check_argument_compat(
179 caller_abi
: &ArgAbi
<'tcx
, Ty
<'tcx
>>,
180 callee_abi
: &ArgAbi
<'tcx
, Ty
<'tcx
>>,
// `layout_compat`: identical types are trivially compatible; differing size or ABI
// alignment is incompatible; otherwise fall through to a conservative comparison of
// the layout ABI (Scalar / ScalarPair primitives).
182 // Heuristic for type comparison.
183 let layout_compat
= || {
184 if caller_abi
.layout
.ty
== callee_abi
.layout
.ty
{
188 if caller_abi
.layout
.size
!= callee_abi
.layout
.size
189 || caller_abi
.layout
.align
.abi
!= callee_abi
.layout
.align
.abi
191 // This cannot go well...
192 // FIXME: What about unsized types?
195 // The rest *should* be okay, but we are extra conservative.
196 match (caller_abi
.layout
.abi
, callee_abi
.layout
.abi
) {
197 // Different valid ranges are okay (once we enforce validity,
198 // that will take care to make it UB to leave the range, just
199 // like for transmute).
200 (abi
::Abi
::Scalar(caller
), abi
::Abi
::Scalar(callee
)) => {
201 caller
.primitive() == callee
.primitive()
204 abi
::Abi
::ScalarPair(caller1
, caller2
),
205 abi
::Abi
::ScalarPair(callee1
, callee2
),
207 caller1
.primitive() == callee1
.primitive()
208 && caller2
.primitive() == callee2
.primitive()
// `pad_compat`: the padding argument must match exactly.
214 // Padding must be fully equal.
215 let pad_compat
= || caller_abi
.pad
== callee_abi
.pad
;
// `arg_attr_compat`: of the regular attributes only `InReg` affects the call ABI;
// the sign/zero-extension mode (`arg_ext`) must also agree.
216 // When comparing the PassMode, we have to be smart about comparing the attributes.
217 let arg_attr_compat
= |a1
: ArgAttributes
, a2
: ArgAttributes
| {
218 // There's only one regular attribute that matters for the call ABI: InReg.
219 // Everything else is things like noalias, dereferencable, nonnull, ...
220 // (This also applies to pointee_size, pointee_align.)
221 if a1
.regular
.contains(ArgAttribute
::InReg
) != a2
.regular
.contains(ArgAttribute
::InReg
)
225 // We also compare the sign extension mode -- this could let the callee make assumptions
226 // about bits that conceptually were not even passed.
227 if a1
.arg_ext
!= a2
.arg_ext
{
// `mode_compat`: both sides must use the same `PassMode` variant, with compatible
// attributes (and matching `on_stack` flags for the Indirect variants).
232 let mode_compat
= || match (caller_abi
.mode
, callee_abi
.mode
) {
233 (PassMode
::Ignore
, PassMode
::Ignore
) => true,
234 (PassMode
::Direct(a1
), PassMode
::Direct(a2
)) => arg_attr_compat(a1
, a2
),
235 (PassMode
::Pair(a1
, b1
), PassMode
::Pair(a2
, b2
)) => {
236 arg_attr_compat(a1
, a2
) && arg_attr_compat(b1
, b2
)
238 (PassMode
::Cast(c1
), PassMode
::Cast(c2
)) => c1
== c2
,
240 PassMode
::Indirect { attrs: a1, extra_attrs: None, on_stack: s1 }
,
241 PassMode
::Indirect { attrs: a2, extra_attrs: None, on_stack: s2 }
,
242 ) => arg_attr_compat(a1
, a2
) && s1
== s2
,
244 PassMode
::Indirect { attrs: a1, extra_attrs: Some(e1), on_stack: s1 }
,
245 PassMode
::Indirect { attrs: a2, extra_attrs: Some(e2), on_stack: s2 }
,
246 ) => arg_attr_compat(a1
, a2
) && arg_attr_compat(e1
, e2
) && s1
== s2
,
// Compatible only when all three checks pass; the success/failure bodies (original
// lines 251–253 and after 254) are not visible in this extraction.
250 if layout_compat() && pad_compat() && mode_compat() {
254 "check_argument_compat: incompatible ABIs:\ncaller: {:?}\ncallee: {:?}",
// NOTE(review): garbled extraction; embedded line numbers jump (269→274, 280→283,
// 287→290), so the closing of the `ok_or_else` closure, the error macro around the
// incompatibility message, and part of the format arguments are missing from view.
261 /// Initialize a single callee argument, checking the types for compatibility.
262 fn pass_argument
<'x
, 'y
>(
// `caller_args`: iterator of (caller operand, caller-side ArgAbi) pairs; advanced only
// when the callee argument is not `PassMode::Ignore`.
264 caller_args
: &mut impl Iterator
<
265 Item
= (&'x OpTy
<'tcx
, M
::PointerTag
>, &'y ArgAbi
<'tcx
, Ty
<'tcx
>>),
267 callee_abi
: &ArgAbi
<'tcx
, Ty
<'tcx
>>,
// `callee_arg`: the callee-local place the argument value is copied into.
268 callee_arg
: &PlaceTy
<'tcx
, M
::PointerTag
>,
269 ) -> InterpResult
<'tcx
>
// Ignored arguments consume no caller operand.
274 if matches
!(callee_abi
.mode
, PassMode
::Ignore
) {
275 // This one is skipped.
278 // Find next caller arg.
279 let (caller_arg
, caller_abi
) = caller_args
.next().ok_or_else(|| {
280 err_ub_format
!("calling a function with fewer arguments than it requires")
// ABI compatibility is checked before performing the copy; failure is reported as
// calling with the wrong argument type (error macro lines not visible here).
283 if !Self::check_argument_compat(caller_abi
, callee_abi
) {
285 "calling a function with argument of type {:?} passing data of type {:?}",
286 callee_arg
.layout
.ty
,
290 // We allow some transmutes here.
291 // FIXME: Depending on the PassMode, this should reset some padding to uninitialized. (This
292 // is true for all `copy_op`, but there are a lot of special cases for argument passing
294 self.copy_op_transmute(&caller_arg
, callee_arg
)
// NOTE(review): garbled extraction with many missing interior lines (embedded numbers
// jump: 320→333, 347→351, 363→370, 384→386, 422→429, 480→486, 519→526, ...). The
// visible fragments show the overall dispatch on `fn_val` / `instance.def`, but
// several statement heads, closing braces, and error paths are not in view.
297 /// Call this function -- pushing the stack frame and initializing the arguments.
299 /// `caller_fn_abi` is used to determine if all the arguments are passed the proper way.
300 /// However, we also need `caller_abi` to determine if we need to do untupling of arguments.
302 /// `with_caller_location` indicates whether the caller passed a caller location. Miri
303 /// implements caller locations without argument passing, but to match `FnAbi` we need to know
304 /// when those arguments are present.
305 pub(crate) fn eval_fn_call(
307 fn_val
: FnVal
<'tcx
, M
::ExtraFnVal
>,
308 (caller_abi
, caller_fn_abi
): (Abi
, &FnAbi
<'tcx
, Ty
<'tcx
>>),
309 args
: &[OpTy
<'tcx
, M
::PointerTag
>],
310 with_caller_location
: bool
,
311 destination
: &PlaceTy
<'tcx
, M
::PointerTag
>,
312 target
: Option
<mir
::BasicBlock
>,
313 mut unwind
: StackPopUnwind
,
314 ) -> InterpResult
<'tcx
> {
315 trace
!("eval_fn_call: {:#?}", fn_val
);
// Machine-specific "other" functions are delegated entirely to the Machine hook.
317 let instance
= match fn_val
{
318 FnVal
::Instance(instance
) => instance
,
319 FnVal
::Other(extra
) => {
320 return M
::call_extra_fn(
// Intrinsics: handled by the Machine directly; no stack frame is pushed here.
// (The `match instance.def` head, original lines ~321–332, is not visible.)
333 ty
::InstanceDef
::Intrinsic(def_id
) => {
334 assert
!(self.tcx
.is_intrinsic(def_id
));
335 // caller_fn_abi is not relevant here, we interpret the arguments directly for each intrinsic.
336 M
::call_intrinsic(self, instance
, args
, destination
, target
, unwind
)
// Shim / item instances: interpreted by pushing a frame with the instance's MIR.
338 ty
::InstanceDef
::VtableShim(..)
339 | ty
::InstanceDef
::ReifyShim(..)
340 | ty
::InstanceDef
::ClosureOnceShim { .. }
341 | ty
::InstanceDef
::FnPtrShim(..)
342 | ty
::InstanceDef
::DropGlue(..)
343 | ty
::InstanceDef
::CloneShim(..)
344 | ty
::InstanceDef
::Item(_
) => {
345 // We need MIR for this fn
346 let Some((body
, instance
)) =
347 M
::find_mir_or_eval_fn(self, instance
, caller_abi
, args
, destination
, target
, unwind
)?
else {
351 // Compute callee information using the `instance` returned by
352 // `find_mir_or_eval_fn`.
353 // FIXME: for variadic support, do we have to somehow determine callee's extra_args?
354 let callee_fn_abi
= self.fn_abi_of_instance(instance
, ty
::List
::empty())?
;
356 if callee_fn_abi
.c_variadic
|| caller_fn_abi
.c_variadic
{
357 throw_unsup_format
!("calling a c-variadic function is not supported");
// Optional ABI enforcement: calling conventions must agree (the error construction
// around the message below is not visible here).
360 if M
::enforce_abi(self) {
361 if caller_fn_abi
.conv
!= callee_fn_abi
.conv
{
363 "calling a function with calling convention {:?} using calling convention {:?}",
// Downgrade `unwind` when the callee's ABI says it cannot unwind.
370 if !matches
!(unwind
, StackPopUnwind
::NotAllowed
) && !callee_fn_abi
.can_unwind
{
371 // The callee cannot unwind.
372 unwind
= StackPopUnwind
::NotAllowed
;
375 self.push_stack_frame(
379 StackPopCleanup
::Goto { ret: target, unwind }
,
382 // If an error is raised here, pop the frame again to get an accurate backtrace.
383 // To this end, we wrap it all in a `try` block.
384 let res
: InterpResult
<'tcx
> = try
{
386 "caller ABI: {:?}, args: {:#?}",
389 .map(|arg
| (arg
.layout
.ty
, format
!("{:?}", **arg
)))
393 "spread_arg: {:?}, locals: {:#?}",
398 self.layout_of_local(self.frame(), local
, None
).unwrap().ty
403 // In principle, we have two iterators: Where the arguments come from, and where
406 // For where they come from: If the ABI is RustCall, we untuple the
407 // last incoming argument. These two iterators do not have the same type,
408 // so to keep the code paths uniform we accept an allocation
409 // (for RustCall ABI only).
410 let caller_args
: Cow
<'_
, [OpTy
<'tcx
, M
::PointerTag
>]> =
411 if caller_abi
== Abi
::RustCall
&& !args
.is_empty() {
413 let (untuple_arg
, args
) = args
.split_last().unwrap();
414 trace
!("eval_fn_call: Will pass last argument by untupling");
419 (0..untuple_arg
.layout
.fields
.count())
420 .map(|i
| self.operand_field(untuple_arg
, i
)),
422 .collect
::<InterpResult
<'_
, Vec
<OpTy
<'tcx
, M
::PointerTag
>>>>(
429 // If `with_caller_location` is set we pretend there is an extra argument (that
430 // we will not pass).
// Sanity check (the assert head, original line ~431, is not visible): caller-side
// argument count must match the caller FnAbi's argument count.
432 caller_args
.len() + if with_caller_location { 1 }
else { 0 }
,
433 caller_fn_abi
.args
.len(),
434 "mismatch between caller ABI and caller arguments",
// Pair each caller operand with its ArgAbi, dropping Ignore-mode arguments.
436 let mut caller_args
= caller_args
438 .zip(caller_fn_abi
.args
.iter())
439 .filter(|arg_and_abi
| !matches
!(arg_and_abi
.1.mode
, PassMode
::Ignore
));
441 // Now we have to spread them out across the callee's locals,
442 // taking into account the `spread_arg`. If we could write
443 // this is a single iterator (that handles `spread_arg`), then
444 // `pass_argument` would be the loop body. It takes care to
445 // not advance `caller_iter` for ZSTs.
446 let mut callee_args_abis
= callee_fn_abi
.args
.iter();
447 for local
in body
.args_iter() {
448 let dest
= self.eval_place(mir
::Place
::from(local
))?
;
// The `spread_arg` local receives one caller argument per field.
449 if Some(local
) == body
.spread_arg
{
451 for i
in 0..dest
.layout
.fields
.count() {
452 let dest
= self.place_field(&dest
, i
)?
;
453 let callee_abi
= callee_args_abis
.next().unwrap();
454 self.pass_argument(&mut caller_args
, callee_abi
, &dest
)?
;
458 let callee_abi
= callee_args_abis
.next().unwrap();
459 self.pass_argument(&mut caller_args
, callee_abi
, &dest
)?
;
462 // If the callee needs a caller location, pretend we consume one more argument from the ABI.
463 if instance
.def
.requires_caller_location(*self.tcx
) {
464 callee_args_abis
.next().unwrap();
466 // Now we should have no more caller args or callee arg ABIs
468 callee_args_abis
.next().is_none(),
469 "mismatch between callee ABI and callee body arguments"
471 if caller_args
.next().is_some() {
472 throw_ub_format
!("calling a function with more arguments than it expected")
474 // Don't forget to check the return type!
475 if !Self::check_argument_compat(&caller_fn_abi
.ret
, &callee_fn_abi
.ret
) {
477 "calling a function with return type {:?} passing \
478 return place of type {:?}",
479 callee_fn_abi
.ret
.layout
.ty
,
480 caller_fn_abi
.ret
.layout
.ty
,
// On error (context lines not visible), the just-pushed frame is popped again so the
// backtrace stays accurate — see the comment above the `try` block.
486 self.stack_mut().pop();
// Virtual (trait-object) calls: find the receiver's vtable, look up the concrete
// function, patch args[0], and recurse.
492 // cannot use the shim here, because that will only result in infinite recursion
493 ty
::InstanceDef
::Virtual(_
, idx
) => {
494 let mut args
= args
.to_vec();
495 // We have to implement all "object safe receivers". So we have to go search for a
496 // pointer or `dyn Trait` type, but it could be wrapped in newtypes. So recursively
497 // unwrap those newtypes until we are there.
498 let mut receiver
= args
[0];
499 let receiver_place
= loop {
500 match receiver
.layout
.ty
.kind() {
501 ty
::Ref(..) | ty
::RawPtr(..) => break self.deref_operand(&receiver
)?
,
502 ty
::Dynamic(..) => break receiver
.assert_mem_place(),
504 // Not there yet, search for the only non-ZST field.
505 let mut non_zst_field
= None
;
506 for i
in 0..receiver
.layout
.fields
.count() {
507 let field
= self.operand_field(&receiver
, i
)?
;
508 if !field
.layout
.is_zst() {
510 non_zst_field
.is_none(),
511 "multiple non-ZST fields in dyn receiver type {}",
514 non_zst_field
= Some(field
);
517 receiver
= non_zst_field
.unwrap_or_else(|| {
519 "no non-ZST fields in dyn receiver type {}",
526 // Find and consult vtable. The type now could be something like RcBox<dyn Trait>,
527 // i.e., it is still not necessarily `ty::Dynamic` (so we cannot use
528 // `place.vtable()`), but it should have a `dyn Trait` tail.
531 .struct_tail_erasing_lifetimes(receiver_place
.layout
.ty
, self.param_env
)
535 let vtable
= self.scalar_to_ptr(receiver_place
.meta
.unwrap_meta())?
;
536 let fn_val
= self.get_vtable_slot(vtable
, u64::try_from(idx
).unwrap())?
;
538 // `*mut receiver_place.layout.ty` is almost the layout that we
539 // want for args[0]: We have to project to field 0 because we want
541 assert
!(receiver_place
.layout
.is_unsized());
542 let receiver_ptr_ty
= self.tcx
.mk_mut_ptr(receiver_place
.layout
.ty
);
543 let this_receiver_ptr
= self.layout_of(receiver_ptr_ty
)?
.field(self, 0);
544 // Adjust receiver argument.
545 args
[0] = OpTy
::from(ImmTy
::from_immediate(
546 Scalar
::from_maybe_pointer(receiver_place
.ptr
, self).into(),
549 trace
!("Patched receiver operand to {:#?}", args
[0]);
550 // recurse with concrete function
553 (caller_abi
, caller_fn_abi
),
555 with_caller_location
,
566 place
: &PlaceTy
<'tcx
, M
::PointerTag
>,
567 instance
: ty
::Instance
<'tcx
>,
568 target
: mir
::BasicBlock
,
569 unwind
: Option
<mir
::BasicBlock
>,
570 ) -> InterpResult
<'tcx
> {
571 trace
!("drop_in_place: {:?},\n {:?}, {:?}", *place
, place
.layout
.ty
, instance
);
572 // We take the address of the object. This may well be unaligned, which is fine
573 // for us here. However, unaligned accesses will probably make the actual drop
574 // implementation fail -- a problem shared by rustc.
575 let place
= self.force_allocation(place
)?
;
577 let (instance
, place
) = match place
.layout
.ty
.kind() {
579 // Dropping a trait object.
580 self.unpack_dyn_trait(&place
)?
582 _
=> (instance
, place
),
584 let fn_abi
= self.fn_abi_of_instance(instance
, ty
::List
::empty())?
;
586 let arg
= ImmTy
::from_immediate(
588 self.layout_of(self.tcx
.mk_mut_ptr(place
.layout
.ty
))?
,
591 let ty
= self.tcx
.mk_unit(); // return type is ()
592 let dest
= MPlaceTy
::dangling(self.layout_of(ty
)?
);
595 FnVal
::Instance(instance
),
602 Some(cleanup
) => StackPopUnwind
::Cleanup(cleanup
),
603 None
=> StackPopUnwind
::Skip
,