2 use rustc
::ty
::{self, Ty}
;
3 use rustc
::ty
::layout
::LayoutOf
;
4 use syntax
::codemap
::Span
;
7 use rustc
::mir
::interpret
::{EvalResult, PrimVal, Value}
;
8 use super::{EvalContext
, eval_context
,
9 Place
, Machine
, ValTy
};
11 use rustc_data_structures
::indexed_vec
::Idx
;
12 use interpret
::memory
::HasMemory
;
16 impl<'a
, 'tcx
, M
: Machine
<'tcx
>> EvalContext
<'a
, 'tcx
, M
> {
/// Transfers control of the current stack frame to `target`: points the
/// frame at the new basic block and resets the statement index so
/// execution resumes at that block's first statement.
/// NOTE(review): this extract is lossy — the function's closing brace
/// (original line 20) is not visible in this chunk.
17 pub fn goto_block(&mut self, target
: mir
::BasicBlock
) {
// Redirect the topmost frame to the chosen basic block...
18 self.frame_mut().block
= target
;
// ...and restart from statement 0 of that block.
19 self.frame_mut().stmt
= 0;
// Executes one MIR terminator for the current stack frame, dispatching on
// `terminator.kind` (Goto, SwitchInt, Call, Drop, Assert, ...).
// NOTE(review): this extract is lossy — the embedded original line numbers
// jump (22 -> 24 -> 25, 27 -> 29, ...), so several match-arm headers and
// closing braces are not visible. Comments below describe only what the
// visible code demonstrates; arm identifications are marked "presumably".
22 pub(super) fn eval_terminator(
24 terminator
: &mir
::Terminator
<'tcx
>,
25 ) -> EvalResult
<'tcx
> {
26 use rustc
::mir
::TerminatorKind
::*;
27 match terminator
.kind
{
// Presumably the `Return` arm (its header line was dropped): dump the
// return place for tracing, then pop the finished frame.
29 self.dump_local(self.frame().return_place
);
30 self.pop_stack_frame()?
// Unconditional jump: simply retarget the frame.
33 Goto { target }
=> self.goto_block(target
),
// Presumably the `SwitchInt` arm: evaluate the discriminant to a
// primitive and select the matching target block.
41 // FIXME(CTFE): forbid branching
42 let discr_val
= self.eval_operand(discr
)?
;
43 let discr_prim
= self.value_to_primval(discr_val
)?
;
45 // Branch to the `otherwise` case by default, if no match is found.
46 let mut target_block
= targets
[targets
.len() - 1];
// Compare the discriminant's raw bytes against each listed case value;
// on a match, take the corresponding target instead of `otherwise`.
48 for (index
, const_int
) in values
.iter().enumerate() {
49 let prim
= PrimVal
::Bytes(const_int
.to_u128_unchecked());
50 if discr_prim
.to_bytes()?
== prim
.to_bytes()?
{
51 target_block
= targets
[index
];
56 self.goto_block(target_block
);
// Presumably the `Call` arm: evaluate the optional (place, block)
// destination, then the callee operand.
65 let destination
= match *destination
{
66 Some((ref lv
, target
)) => Some((self.eval_place(lv
)?
, target
)),
70 let func
= self.eval_operand(func
)?
;
// Determine the (instance, signature) pair from the callee's type.
71 let (fn_def
, sig
) = match func
.ty
.sty
{
// Function-pointer case: read the pointer, look up the instance it
// refers to, and verify the instance's real signature is compatible
// with the pointer's declared signature.
73 let fn_ptr
= self.value_to_primval(func
)?
.to_ptr()?
;
74 let instance
= self.memory
.get_fn(fn_ptr
)?
;
75 let instance_ty
= instance
.ty(self.tcx
);
76 match instance_ty
.sty
{
78 let real_sig
= instance_ty
.fn_sig(self.tcx
);
// Erase late-bound regions so both signatures compare on equal terms.
79 let sig
= self.tcx
.erase_late_bound_regions_and_normalize(&sig
);
80 let real_sig
= self.tcx
.erase_late_bound_regions_and_normalize(&real_sig
);
81 if !self.check_sig_compat(sig
, real_sig
)?
{
82 return err
!(FunctionPointerTyMismatch(real_sig
, sig
));
85 ref other
=> bug
!("instance def ty: {:?}", other
),
// Direct call to a known function item: resolve for the current substs.
89 ty
::TyFnDef(def_id
, substs
) => (
90 self.resolve(def_id
, substs
)?
,
91 func
.ty
.fn_sig(self.tcx
),
// Any other callee type is unsupported.
94 let msg
= format
!("can't handle callee of type {:?}", func
.ty
);
95 return err
!(Unimplemented(msg
));
98 let args
= self.operands_to_args(args
)?
;
99 let sig
= self.tcx
.erase_late_bound_regions_and_normalize(&sig
);
104 terminator
.source_info
.span
,
// Presumably the `Drop` arm: evaluate the place, monomorphize its type
// with the frame's substitutions, and resolve `drop_in_place` for it.
114 // FIXME(CTFE): forbid drop in const eval
115 let place
= self.eval_place(location
)?
;
116 let ty
= self.place_ty(location
);
117 let ty
= self.tcx
.trans_apply_param_substs(self.substs(), &ty
);
118 trace
!("TerminatorKind::drop: {:?}, type {}", location
, ty
);
120 let instance
= eval_context
::resolve_drop_in_place(self.tcx
, ty
);
125 terminator
.source_info
.span
,
// Presumably the `Assert` arm: evaluate the condition to a bool; when it
// matches `expected`, continue to `target`, otherwise report the failure
// described by the assert message below.
137 let cond_val
= self.eval_operand_to_primval(cond
)?
.to_bool()?
;
138 if expected
== cond_val
{
139 self.goto_block(target
);
141 use rustc
::mir
::AssertMessage
::*;
// Bounds-check failure: evaluate len and index just for the error report.
143 BoundsCheck { ref len, ref index }
=> {
144 let span
= terminator
.source_info
.span
;
145 let len
= self.eval_operand_to_primval(len
)
146 .expect("can't eval len")
148 let index
= self.eval_operand_to_primval(index
)
149 .expect("can't eval index")
151 err
!(ArrayIndexOutOfBounds(span
, len
, index
))
// Arithmetic failure: forward the math error together with its span.
154 err
!(Math(terminator
.source_info
.span
, err
.clone()))
// Generator-related and unwinding terminators are unsupported here;
// false edges must already have been removed by the MIR passes.
156 GeneratorResumedAfterReturn
|
157 GeneratorResumedAfterPanic
=> unimplemented
!(),
162 Yield { .. }
=> unimplemented
!("{:#?}", terminator
.kind
),
163 GeneratorDrop
=> unimplemented
!(),
164 DropAndReplace { .. }
=> unimplemented
!(),
165 Resume
=> unimplemented
!(),
166 Abort
=> unimplemented
!(),
167 FalseEdges { .. }
=> bug
!("should have been eliminated by `simplify_branches` mir pass"),
168 FalseUnwind { .. }
=> bug
!("should have been eliminated by `simplify_branches` mir pass"),
169 Unreachable
=> return err
!(Unreachable
),
175 /// Decides whether it is okay to call the method with signature `real_sig` using signature `sig`.
176 /// FIXME: This should take into account the platform-dependent ABI description.
// NOTE(review): the `fn check_sig_compat(...)` header line itself
// (original ~177-178) was dropped by this lossy extract; only the
// parameter list and body fragments below are visible.
179 sig
: ty
::FnSig
<'tcx
>,
180 real_sig
: ty
::FnSig
<'tcx
>,
181 ) -> EvalResult
<'tcx
, bool
> {
// Inner helper: per-type compatibility. Identical types are compatible;
// raw pointers/references may differ in pointee type (and raw pointers
// in mutability), everything else must match exactly.
182 fn check_ty_compat
<'tcx
>(ty
: Ty
<'tcx
>, real_ty
: Ty
<'tcx
>) -> bool
{
185 } // This is actually a fast pointer comparison
186 return match (&ty
.sty
, &real_ty
.sty
) {
187 // Permit changing the pointer type of raw pointers and references as well as
188 // mutability of raw pointers.
189 // TODO: Should not be allowed when fat pointers are involved.
190 (&ty
::TyRawPtr(_
), &ty
::TyRawPtr(_
)) => true,
191 (&ty
::TyRef(_
, _
), &ty
::TyRef(_
, _
)) => {
// References must at least agree on mutability.
192 ty
.is_mutable_pointer() == real_ty
.is_mutable_pointer()
194 // rule out everything else
// Fast path: same ABI, same variadicity, same arity, and every
// input/output pairwise compatible -> the signatures match.
199 if sig
.abi
== real_sig
.abi
&& sig
.variadic
== real_sig
.variadic
&&
200 sig
.inputs_and_output
.len() == real_sig
.inputs_and_output
.len() &&
201 sig
.inputs_and_output
203 .zip(real_sig
.inputs_and_output
)
204 .all(|(ty
, real_ty
)| check_ty_compat(ty
, real_ty
))
// Variadic signatures get no special-case treatment below.
210 if sig
.variadic
|| real_sig
.variadic
{
211 // We're not touching this
215 // We need to allow what comes up when a non-capturing closure is cast to a fn().
// Special case: a `Rust`-ABI fn pointer calling a `RustCall` closure
// shim. The shim takes (zero-sized closure env, args tuple); accept it
// when the env is a ZST and the tuple's element types line up with
// `sig`'s inputs.
216 match (sig
.abi
, real_sig
.abi
) {
217 (Abi
::Rust
, Abi
::RustCall
) // check the ABIs. This makes the test here non-symmetric.
218 if check_ty_compat(sig
.output(), real_sig
.output()) && real_sig
.inputs_and_output
.len() == 3 => {
219 // First argument of real_sig must be a ZST
220 let fst_ty
= real_sig
.inputs_and_output
[0];
221 if self.layout_of(fst_ty
)?
.is_zst() {
222 // Second argument must be a tuple matching the argument list of sig
223 let snd_ty
= real_sig
.inputs_and_output
[1];
225 ty
::TyTuple(tys
, _
) if sig
.inputs().len() == tys
.len() =>
226 if sig
.inputs().iter().zip(tys
).all(|(ty
, real_ty
)| check_ty_compat(ty
, real_ty
)) {
236 // Nope, this doesn't work.
242 instance
: ty
::Instance
<'tcx
>,
243 destination
: Option
<(Place
, mir
::BasicBlock
)>,
244 args
: &[ValTy
<'tcx
>],
246 sig
: ty
::FnSig
<'tcx
>,
247 ) -> EvalResult
<'tcx
> {
248 trace
!("eval_fn_call: {:#?}", instance
);
250 ty
::InstanceDef
::Intrinsic(..) => {
251 let (ret
, target
) = match destination
{
253 _
=> return err
!(Unreachable
),
255 let ty
= sig
.output();
256 let layout
= self.layout_of(ty
)?
;
257 M
::call_intrinsic(self, instance
, args
, ret
, layout
, target
)?
;
258 self.dump_local(ret
);
261 // FIXME: figure out why we can't just go through the shim
262 ty
::InstanceDef
::ClosureOnceShim { .. }
=> {
263 if M
::eval_fn_call(self, instance
, destination
, args
, span
, sig
)?
{
266 let mut arg_locals
= self.frame().mir
.args_iter();
268 // closure as closure once
270 for (arg_local
, &valty
) in arg_locals
.zip(args
) {
271 let dest
= self.eval_place(&mir
::Place
::Local(arg_local
))?
;
272 self.write_value(valty
, dest
)?
;
275 // non capture closure as fn ptr
276 // need to inject zst ptr for closure object (aka do nothing)
277 // and need to pack arguments
281 self.frame().mir
.args_iter().collect
::<Vec
<_
>>()
283 trace
!("args: {:?}", args
);
284 let local
= arg_locals
.nth(1).unwrap();
285 for (i
, &valty
) in args
.into_iter().enumerate() {
286 let dest
= self.eval_place(&mir
::Place
::Local(local
).field(
290 self.write_value(valty
, dest
)?
;
293 _
=> bug
!("bad ABI for ClosureOnceShim: {:?}", sig
.abi
),
297 ty
::InstanceDef
::FnPtrShim(..) |
298 ty
::InstanceDef
::DropGlue(..) |
299 ty
::InstanceDef
::CloneShim(..) |
300 ty
::InstanceDef
::Item(_
) => {
301 // Push the stack frame, and potentially be entirely done if the call got hooked
302 if M
::eval_fn_call(self, instance
, destination
, args
, span
, sig
)?
{
306 // Pass the arguments
307 let mut arg_locals
= self.frame().mir
.args_iter();
308 trace
!("ABI: {:?}", sig
.abi
);
311 self.frame().mir
.args_iter().collect
::<Vec
<_
>>()
313 trace
!("args: {:?}", args
);
316 assert_eq
!(args
.len(), 2);
319 // write first argument
320 let first_local
= arg_locals
.next().unwrap();
321 let dest
= self.eval_place(&mir
::Place
::Local(first_local
))?
;
322 self.write_value(args
[0], dest
)?
;
325 // unpack and write all other args
326 let layout
= self.layout_of(args
[1].ty
)?
;
327 if let ty
::TyTuple(..) = args
[1].ty
.sty
{
328 if self.frame().mir
.args_iter().count() == layout
.fields
.count() + 1 {
329 match args
[1].value
{
330 Value
::ByRef(ptr
, align
) => {
331 for (i
, arg_local
) in arg_locals
.enumerate() {
332 let field
= layout
.field(&self, i
)?
;
333 let offset
= layout
.fields
.offset(i
).bytes();
334 let arg
= Value
::ByRef(ptr
.offset(offset
, &self)?
,
335 align
.min(field
.align
));
337 self.eval_place(&mir
::Place
::Local(arg_local
))?
;
339 "writing arg {:?} to {:?} (type: {})",
348 self.write_value(valty
, dest
)?
;
351 Value
::ByVal(PrimVal
::Undef
) => {}
353 trace
!("{:#?}, {:#?}", other
, layout
);
354 let mut layout
= layout
;
356 for i
in 0..layout
.fields
.count() {
357 let field
= layout
.field(&self, i
)?
;
358 if layout
.fields
.offset(i
).bytes() == 0 && layout
.size
== field
.size
{
365 let dest
= self.eval_place(&mir
::Place
::Local(
366 arg_locals
.next().unwrap(),
372 self.write_value(valty
, dest
)?
;
376 trace
!("manual impl of rust-call ABI");
377 // called a manual impl of a rust-call function
378 let dest
= self.eval_place(
379 &mir
::Place
::Local(arg_locals
.next().unwrap()),
381 self.write_value(args
[1], dest
)?
;
385 "rust-call ABI tuple argument was {:#?}, {:#?}",
392 for (arg_local
, &valty
) in arg_locals
.zip(args
) {
393 let dest
= self.eval_place(&mir
::Place
::Local(arg_local
))?
;
394 self.write_value(valty
, dest
)?
;
400 // cannot use the shim here, because that will only result in infinite recursion
401 ty
::InstanceDef
::Virtual(_
, idx
) => {
402 let ptr_size
= self.memory
.pointer_size();
403 let ptr_align
= self.tcx
.data_layout
.pointer_align
;
404 let (ptr
, vtable
) = self.into_ptr_vtable_pair(args
[0].value
)?
;
405 let fn_ptr
= self.memory
.read_ptr_sized_unsigned(
406 vtable
.offset(ptr_size
* (idx
as u64 + 3), &self)?
,
409 let instance
= self.memory
.get_fn(fn_ptr
)?
;
410 let mut args
= args
.to_vec();
411 let ty
= self.layout_of(args
[0].ty
)?
.field(&self, 0)?
.ty
;
413 args
[0].value
= ptr
.to_value();
414 // recurse with concrete function
415 self.eval_fn_call(instance
, destination
, &args
, span
, sig
)