// compiler/rustc_mir/src/interpret/terminator.rs (upstream rustc 1.49.0)
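//! Evaluation of MIR terminators: `Call` (including virtual calls and argument
//! passing), `Drop`, `Assert`, and the remaining control-flow terminators.
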
use std::borrow::Cow;
use std::convert::TryFrom;

use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::ty::Instance;
use rustc_middle::{mir, ty};
use rustc_target::abi::{self, LayoutOf as _};
use rustc_target::spec::abi::Abi;

use super::{
    FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, StackPopCleanup,
};
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.pop_stack_frame(/* unwinding */ false)?
            }

            Goto { target } => self.go_to_block(target),

            SwitchInt { ref discr, ref targets, switch_ty } => {
                let discr = self.read_immediate(self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);
                assert_eq!(discr.layout.ty, switch_ty);

                // Branch to the `otherwise` case by default, if no match is found.
                assert!(!targets.iter().is_empty());
                let mut target_block = targets.otherwise();

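                // Informal example: for MIR like `switchInt(x) -> [0: bb1, 7: bb2,
                // otherwise: bb3]`, `targets.iter()` yields `(0, bb1)` and `(7, bb2)`;
                // with `x == 7` the `Eq` comparison below succeeds on the second pair
                // and we branch to `bb2`, otherwise we fall through to `bb3`.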
                for (const_int, target) in targets.iter() {
                    // Compare using binary_op, to also support pointer values
                    let res = self
                        .overflowing_binary_op(
                            mir::BinOp::Eq,
                            discr,
                            ImmTy::from_uint(const_int, discr.layout),
                        )?
                        .0;
                    if res.to_bool()? {
                        target_block = target;
                        break;
                    }
                }

                self.go_to_block(target_block);
            }

            Call { ref func, ref args, destination, ref cleanup, from_hir_call: _, fn_span: _ } => {
                let old_stack = self.frame_idx();
                let old_loc = self.frame().loc;
                let func = self.eval_operand(func, None)?;
                let (fn_val, abi) = match *func.layout.ty.kind() {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_scalar(func)?.check_init()?;
                        let fn_val = self.memory.get_fn(fn_ptr)?;
                        (fn_val, caller_abi)
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (
                            FnVal::Instance(
                                self.resolve(ty::WithOptConstParam::unknown(def_id), substs)?,
                            ),
                            sig.abi(),
                        )
                    }
                    _ => span_bug!(
                        terminator.source_info.span,
                        "invalid callee of type {:?}",
                        func.layout.ty
                    ),
                };
                let args = self.eval_operands(args)?;
                let ret = match destination {
                    Some((dest, ret)) => Some((self.eval_place(dest)?, ret)),
                    None => None,
                };
                self.eval_fn_call(fn_val, abi, &args[..], ret, *cleanup)?;
                // Sanity-check that `eval_fn_call` either pushed a new frame or
                // did a jump to another block.
                if self.frame_idx() == old_stack && self.frame().loc == old_loc {
                    span_bug!(terminator.source_info.span, "evaluating this call made no progress");
                }
            }

            Drop { place, target, unwind } => {
                let place = self.eval_place(place)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", place, ty);

                let instance = Instance::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(place, instance, target, unwind)?;
            }

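            // `Assert` terminators arise from MIR-level checks such as arithmetic
            // overflow and array bounds checks; how a failed assertion is reported
            // (e.g. a const-eval error vs. a panic) is left to the `Machine` via
            // `assert_panic` below.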
            Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond_val =
                    self.read_immediate(self.eval_operand(cond, None)?)?.to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.go_to_block(target);
                } else {
                    M::assert_panic(self, msg, cleanup)?;
                }
            }

            Abort => {
                M::abort(self)?;
            }

            // When we encounter Resume, we've finished unwinding the cleanup blocks
            // for the current stack frame. We pop it in order to continue unwinding
            // the next frame.
            Resume => {
                trace!("unwinding: resuming from cleanup");
                // By definition, a Resume terminator means
                // that we're unwinding.
                self.pop_stack_frame(/* unwinding */ true)?;
                return Ok(());
            }

            // It is UB to ever encounter this.
            Unreachable => throw_ub!(Unreachable),

            // These should never occur for MIR we actually run.
            DropAndReplace { .. }
            | FalseEdge { .. }
            | FalseUnwind { .. }
            | Yield { .. }
            | GeneratorDrop => span_bug!(
                terminator.source_info.span,
                "{:#?} should have been eliminated by MIR pass",
                terminator.kind
            ),

            // Inline assembly can't be interpreted.
            InlineAsm { .. } => throw_unsup_format!("inline assembly is not supported"),
        }

        Ok(())
    }

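    /// Returns whether an argument of layout `caller` may be passed where the
    /// callee expects layout `callee`. Illustrative example (assuming the usual
    /// layouts): under the Rust ABI, passing a `u32` where a `char` is expected
    /// is accepted here, since both are scalars with the same primitive value
    /// type and only differ in their valid range.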
    fn check_argument_compat(
        rust_abi: bool,
        caller: TyAndLayout<'tcx>,
        callee: TyAndLayout<'tcx>,
    ) -> bool {
        if caller.ty == callee.ty {
            // No question
            return true;
        }
        if !rust_abi {
            // Don't risk anything
            return false;
        }
        // Compare layout
        match (&caller.abi, &callee.abi) {
            // Different valid ranges are okay (once we enforce validity,
            // that will take care to make it UB to leave the range, just
            // like for transmute).
            (abi::Abi::Scalar(ref caller), abi::Abi::Scalar(ref callee)) => {
                caller.value == callee.value
            }
            (
                abi::Abi::ScalarPair(ref caller1, ref caller2),
                abi::Abi::ScalarPair(ref callee1, ref callee2),
            ) => caller1.value == callee1.value && caller2.value == callee2.value,
            // Be conservative
            _ => false,
        }
    }

    /// Pass a single argument, checking the types for compatibility.
    fn pass_argument(
        &mut self,
        rust_abi: bool,
        caller_arg: &mut impl Iterator<Item = OpTy<'tcx, M::PointerTag>>,
        callee_arg: PlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx> {
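        // Under the Rust ABI, ZST arguments take up no argument slot at all:
        // e.g. for `fn f(_: (), x: u32)`, only the data for `x` is actually
        // transferred, and the `()` argument is skipped on both sides.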
        if rust_abi && callee_arg.layout.is_zst() {
            // Nothing to do.
            trace!("Skipping callee ZST");
            return Ok(());
        }
        let caller_arg = caller_arg.next().ok_or_else(|| {
            err_ub_format!("calling a function with fewer arguments than it requires")
        })?;
        if rust_abi {
            assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out");
        }
        // Now, check
        if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) {
            throw_ub_format!(
                "calling a function with argument of type {:?} passing data of type {:?}",
                callee_arg.layout.ty,
                caller_arg.layout.ty
            )
        }
        // We allow some transmutes here
        self.copy_op_transmute(caller_arg, callee_arg)
    }

    /// Call this function -- pushing the stack frame and initializing the arguments.
    fn eval_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        ret: Option<(PlaceTy<'tcx, M::PointerTag>, mir::BasicBlock)>,
        unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("eval_fn_call: {:#?}", fn_val);

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(self, extra, args, ret, unwind);
            }
        };

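        // Example of what the following check catches: transmuting a Rust-ABI
        // function pointer to `extern "C" fn(...)` and calling it is a
        // caller/callee ABI mismatch, reported as UB below.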
        // ABI check
        {
            let callee_abi = {
                let instance_ty = instance.ty(*self.tcx, self.param_env);
                match instance_ty.kind() {
                    ty::FnDef(..) => instance_ty.fn_sig(*self.tcx).abi(),
                    ty::Closure(..) => Abi::RustCall,
                    ty::Generator(..) => Abi::Rust,
                    _ => span_bug!(self.cur_span(), "unexpected callee ty: {:?}", instance_ty),
                }
            };
            let normalize_abi = |abi| match abi {
                Abi::Rust | Abi::RustCall | Abi::RustIntrinsic | Abi::PlatformIntrinsic =>
                // These are all the same ABI, really.
                {
                    Abi::Rust
                }
                abi => abi,
            };
            if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                throw_ub_format!(
                    "calling a function with ABI {:?} using caller ABI {:?}",
                    callee_abi,
                    caller_abi
                )
            }
        }

        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                assert!(caller_abi == Abi::RustIntrinsic || caller_abi == Abi::PlatformIntrinsic);
                M::call_intrinsic(self, instance, args, ret, unwind)
            }
            ty::InstanceDef::VtableShim(..)
            | ty::InstanceDef::ReifyShim(..)
            | ty::InstanceDef::ClosureOnceShim { .. }
            | ty::InstanceDef::FnPtrShim(..)
            | ty::InstanceDef::DropGlue(..)
            | ty::InstanceDef::CloneShim(..)
            | ty::InstanceDef::Item(_) => {
                // We need MIR for this fn
                let body = match M::find_mir_or_eval_fn(self, instance, args, ret, unwind)? {
                    Some(body) => body,
                    None => return Ok(()),
                };

                self.push_stack_frame(
                    instance,
                    body,
                    ret.map(|p| p.0),
                    StackPopCleanup::Goto { ret: ret.map(|p| p.1), unwind },
                )?;

                // If an error is raised here, pop the frame again to get an accurate backtrace.
                // To this end, we wrap it all in a `try` block.
                let res: InterpResult<'tcx> = try {
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>()
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        body.spread_arg,
                        body.args_iter()
                            .map(|local| (
                                local,
                                self.layout_of_local(self.frame(), local, None).unwrap().ty
                            ))
                            .collect::<Vec<_>>()
                    );

                    // Figure out how to pass which arguments.
                    // The Rust ABI is special: ZSTs get skipped.
                    let rust_abi = match caller_abi {
                        Abi::Rust | Abi::RustCall => true,
                        _ => false,
                    };
                    // We have two iterators: where the arguments come from,
                    // and where they go to.

                    // For where they come from: if the ABI is RustCall, we untuple the
                    // last incoming argument. These two iterators do not have the same type,
                    // so to keep the code paths uniform we accept an allocation
                    // (for RustCall ABI only).
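                    // Hypothetical example: for a `RustCall` callee such as
                    // `extern "rust-call" fn call(self, args: (u32, bool))`, a caller
                    // passing `(7, true)` as its last operand has that tuple split
                    // into one operand per field here, so `7` and `true` travel as
                    // separate arguments.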
                    let caller_args: Cow<'_, [OpTy<'tcx, M::PointerTag>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple
                            let (&untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(
                                args.iter()
                                    .map(|&a| Ok(a))
                                    .chain(
                                        (0..untuple_arg.layout.fields.count())
                                            .map(|i| self.operand_field(untuple_arg, i)),
                                    )
                                    .collect::<InterpResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>()?,
                            )
                        } else {
                            // Plain arg passing
                            Cow::from(args)
                        };
                    // Skip ZSTs
                    let mut caller_iter =
                        caller_args.iter().filter(|op| !rust_abi || !op.layout.is_zst()).copied();

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`. If we could write
                    // this as a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_iter` for ZSTs.
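                    // For example, a closure body whose MIR declares `spread_arg` for
                    // a tuple local of type `(u32, bool)` receives the (already
                    // untupled) caller operands one per tuple field, via the
                    // `place_field` projection below.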
                    for local in body.args_iter() {
                        let dest = self.eval_place(mir::Place::from(local))?;
                        if Some(local) == body.spread_arg {
                            // Must be a tuple
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(dest, i)?;
                                self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                            }
                        } else {
                            // Normal argument
                            self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                        }
                    }
                    // Now we should have no more caller args
                    if caller_iter.next().is_some() {
                        throw_ub_format!("calling a function with more arguments than it expected")
                    }
                    // Don't forget to check the return type!
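                    // If the caller provided no return place, the call is only valid
                    // when the callee can never return, i.e. when its declared return
                    // type is uninhabited (e.g. `fn() -> !`); that is what the `else`
                    // branch below checks.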
                    if let Some((caller_ret, _)) = ret {
                        let callee_ret = self.eval_place(mir::Place::return_place())?;
                        if !Self::check_argument_compat(
                            rust_abi,
                            caller_ret.layout,
                            callee_ret.layout,
                        ) {
                            throw_ub_format!(
                                "calling a function with return type {:?} passing \
                                 return place of type {:?}",
                                callee_ret.layout.ty,
                                caller_ret.layout.ty
                            )
                        }
                    } else {
                        let local = mir::RETURN_PLACE;
                        let callee_layout = self.layout_of_local(self.frame(), local, None)?;
                        if !callee_layout.abi.is_uninhabited() {
                            throw_ub_format!("calling a returning function without a return place")
                        }
                    }
                };
                match res {
                    Err(err) => {
                        self.stack_mut().pop();
                        Err(err)
                    }
                    Ok(()) => Ok(()),
                }
            }
            // We cannot use the shim here, because that would only result in infinite recursion.
            ty::InstanceDef::Virtual(_, idx) => {
                let mut args = args.to_vec();
                // We have to implement all "object safe receivers". Currently we
                // support built-in pointers (`&`, `&mut`, `Box`) as well as unsized
                // `self`. We do not yet support custom self types.
                // Also see `compiler/rustc_codegen_llvm/src/abi.rs` and
                // `compiler/rustc_codegen_ssa/src/mir/block.rs`.
                let receiver_place = match args[0].layout.ty.builtin_deref(true) {
                    Some(_) => {
                        // Built-in pointer.
                        self.deref_operand(args[0])?
                    }
                    None => {
                        // Unsized self.
                        args[0].assert_mem_place(self)
                    }
                };
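                // E.g. for a call on `&dyn Trait` or `Box<dyn Trait>`, `builtin_deref`
                // succeeds above and we dereference to the trait-object place; for an
                // (unstable) unsized `self` receiver of type `dyn Trait`, the argument
                // itself already is that place in memory.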
                // Find and consult vtable
                let vtable = receiver_place.vtable();
                let fn_val = self.get_vtable_slot(vtable, u64::try_from(idx).unwrap())?;

                // `*mut receiver_place.layout.ty` is almost the layout that we
                // want for args[0]: we have to project to field 0 because we want
                // a thin pointer.
                assert!(receiver_place.layout.is_unsized());
                let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
                let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0)?;
                // Adjust receiver argument.
                args[0] =
                    OpTy::from(ImmTy::from_immediate(receiver_place.ptr.into(), this_receiver_ptr));
                trace!("Patched self operand to {:#?}", args[0]);
                // Recurse with the concrete function found in the vtable.
                self.eval_fn_call(fn_val, caller_abi, &args, ret, unwind)
            }
        }
    }

    fn drop_in_place(
        &mut self,
        place: PlaceTy<'tcx, M::PointerTag>,
        instance: ty::Instance<'tcx>,
        target: mir::BasicBlock,
        unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("drop_in_place: {:?},\n {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object. This may well be unaligned, which is fine
        // for us here. However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.kind() {
            ty::Dynamic(..) => {
                // Dropping a trait object.
                self.unpack_dyn_trait(place)?
            }
            _ => (instance, place),
        };
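        // At this point, e.g. when dropping a `dyn Debug` place, `instance` is the
        // concrete `drop_in_place` taken from the vtable and `place` refers to the
        // underlying concrete value; for sized types both are unchanged.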

        let arg = ImmTy::from_immediate(
            place.to_ref(),
            self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        );

        let ty = self.tcx.mk_unit(); // return type is ()
        let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);

        self.eval_fn_call(
            FnVal::Instance(instance),
            Abi::Rust,
            &[arg.into()],
            Some((dest.into(), target)),
            unwind,
        )
    }
}