// compiler/rustc_mir/src/interpret/terminator.rs (upstream rustc 1.54.0+dfsg1)
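//! Evaluation of MIR terminators (`Goto`, `SwitchInt`, `Call`, `Drop`, `Assert`, ...) in the
//! MIR interpretation engine.
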
use std::borrow::Cow;
use std::convert::TryFrom;

use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::ty::layout::{self, TyAndLayout};
use rustc_middle::ty::Instance;
use rustc_middle::{
    mir,
    ty::{self, Ty},
};
use rustc_target::abi::{self, LayoutOf as _};
use rustc_target::spec::abi::Abi;

use super::{
    FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, StackPopCleanup,
    StackPopUnwind,
};

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
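    /// Returns whether a function with the given codegen attributes and ABI is allowed to
    /// unwind under the current session's panic strategy.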
    fn fn_can_unwind(&self, attrs: CodegenFnAttrFlags, abi: Abi) -> bool {
        layout::fn_can_unwind(
            self.tcx.sess.panic_strategy(),
            attrs,
            layout::conv_from_spec_abi(*self.tcx, abi),
            abi,
        )
    }

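    /// Evaluate a single MIR terminator: jump to the next block, push or pop a stack frame
    /// for calls and returns, invoke drop glue, or raise the appropriate error.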
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.pop_stack_frame(/* unwinding */ false)?
            }

            Goto { target } => self.go_to_block(target),

            SwitchInt { ref discr, ref targets, switch_ty } => {
                let discr = self.read_immediate(&self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);
                assert_eq!(discr.layout.ty, switch_ty);

                // Branch to the `otherwise` case by default, if no match is found.
                assert!(!targets.iter().is_empty());
                let mut target_block = targets.otherwise();

                for (const_int, target) in targets.iter() {
                    // Compare using binary_op, to also support pointer values
                    let res = self
                        .overflowing_binary_op(
                            mir::BinOp::Eq,
                            &discr,
                            &ImmTy::from_uint(const_int, discr.layout),
                        )?
                        .0;
                    if res.to_bool()? {
                        target_block = target;
                        break;
                    }
                }

                self.go_to_block(target_block);
            }

            Call { ref func, ref args, destination, ref cleanup, from_hir_call: _, fn_span: _ } => {
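                // Evaluate the callee and the arguments, resolve the function value being
                // called, and then let `eval_fn_call` below do the actual call.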
                let old_stack = self.frame_idx();
                let old_loc = self.frame().loc;
                let func = self.eval_operand(func, None)?;
                let (fn_val, abi, caller_can_unwind) = match *func.layout.ty.kind() {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_scalar(&func)?.check_init()?;
                        let fn_val = self.memory.get_fn(fn_ptr)?;
                        (
                            fn_val,
                            caller_abi,
                            self.fn_can_unwind(layout::fn_ptr_codegen_fn_attr_flags(), caller_abi),
                        )
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (
                            FnVal::Instance(
                                self.resolve(ty::WithOptConstParam::unknown(def_id), substs)?,
                            ),
                            sig.abi(),
                            self.fn_can_unwind(self.tcx.codegen_fn_attrs(def_id).flags, sig.abi()),
                        )
                    }
                    _ => span_bug!(
                        terminator.source_info.span,
                        "invalid callee of type {:?}",
                        func.layout.ty
                    ),
                };
                let args = self.eval_operands(args)?;
                let dest_place;
                let ret = match destination {
                    Some((dest, ret)) => {
                        dest_place = self.eval_place(dest)?;
                        Some((&dest_place, ret))
                    }
                    None => None,
                };
                self.eval_fn_call(
                    fn_val,
                    abi,
                    &args[..],
                    ret,
                    match (cleanup, caller_can_unwind) {
                        (Some(cleanup), true) => StackPopUnwind::Cleanup(*cleanup),
                        (None, true) => StackPopUnwind::Skip,
                        (_, false) => StackPopUnwind::NotAllowed,
                    },
                )?;
                // Sanity-check that `eval_fn_call` either pushed a new frame or
                // did a jump to another block.
                if self.frame_idx() == old_stack && self.frame().loc == old_loc {
                    span_bug!(terminator.source_info.span, "evaluating this call made no progress");
                }
            }

            Drop { place, target, unwind } => {
                let place = self.eval_place(place)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", place, ty);

                let instance = Instance::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(&place, instance, target, unwind)?;
            }

            Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond_val =
                    self.read_immediate(&self.eval_operand(cond, None)?)?.to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.go_to_block(target);
                } else {
                    M::assert_panic(self, msg, cleanup)?;
                }
            }

            Abort => {
                M::abort(self, "the program aborted execution".to_owned())?;
            }

            // When we encounter Resume, we've finished unwinding
            // cleanup for the current stack frame. We pop it in order
            // to continue unwinding the next frame
            Resume => {
                trace!("unwinding: resuming from cleanup");
                // By definition, a Resume terminator means
                // that we're unwinding
                self.pop_stack_frame(/* unwinding */ true)?;
                return Ok(());
            }

            // It is UB to ever encounter this.
            Unreachable => throw_ub!(Unreachable),

            // These should never occur for MIR we actually run.
            DropAndReplace { .. }
            | FalseEdge { .. }
            | FalseUnwind { .. }
            | Yield { .. }
            | GeneratorDrop => span_bug!(
                terminator.source_info.span,
                "{:#?} should have been eliminated by MIR pass",
                terminator.kind
            ),

            // Inline assembly can't be interpreted.
            InlineAsm { .. } => throw_unsup_format!("inline assembly is not supported"),
        }

        Ok(())
    }

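    /// Returns whether a value of layout `caller` may be used where the callee expects
    /// layout `callee`. Identical types always match; under the Rust ABI, differing types
    /// are additionally accepted when their scalar (pair) representations agree.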
    fn check_argument_compat(
        rust_abi: bool,
        caller: TyAndLayout<'tcx>,
        callee: TyAndLayout<'tcx>,
    ) -> bool {
        if caller.ty == callee.ty {
            // No question
            return true;
        }
        if !rust_abi {
            // Don't risk anything
            return false;
        }
        // Compare layout
        match (&caller.abi, &callee.abi) {
            // Different valid ranges are okay (once we enforce validity,
            // that will take care to make it UB to leave the range, just
            // like for transmute).
            (abi::Abi::Scalar(ref caller), abi::Abi::Scalar(ref callee)) => {
                caller.value == callee.value
            }
            (
                abi::Abi::ScalarPair(ref caller1, ref caller2),
                abi::Abi::ScalarPair(ref callee1, ref callee2),
            ) => caller1.value == callee1.value && caller2.value == callee2.value,
            // Be conservative
            _ => false,
        }
    }

    /// Pass a single argument, checking the types for compatibility.
    fn pass_argument(
        &mut self,
        rust_abi: bool,
        caller_arg: &mut impl Iterator<Item = OpTy<'tcx, M::PointerTag>>,
        callee_arg: &PlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx> {
        if rust_abi && callee_arg.layout.is_zst() {
            // Nothing to do.
            trace!("Skipping callee ZST");
            return Ok(());
        }
        let caller_arg = caller_arg.next().ok_or_else(|| {
            err_ub_format!("calling a function with fewer arguments than it requires")
        })?;
        if rust_abi {
            assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out");
        }
        // Now, check
        if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) {
            throw_ub_format!(
                "calling a function with argument of type {:?} passing data of type {:?}",
                callee_arg.layout.ty,
                caller_arg.layout.ty
            )
        }
        // We allow some transmutes here
        self.copy_op_transmute(&caller_arg, callee_arg)
    }

    /// Call this function -- pushing the stack frame and initializing the arguments.
    fn eval_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        ret: Option<(&PlaceTy<'tcx, M::PointerTag>, mir::BasicBlock)>,
        mut unwind: StackPopUnwind,
    ) -> InterpResult<'tcx> {
        trace!("eval_fn_call: {:#?}", fn_val);

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(self, extra, caller_abi, args, ret, unwind);
            }
        };

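        // Helper to read the callee-side ABI off a callee type: `fn` items use their
        // signature's ABI, closures the "rust-call" ABI, and generators plain Rust.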
        let get_abi = |this: &Self, instance_ty: Ty<'tcx>| match instance_ty.kind() {
            ty::FnDef(..) => instance_ty.fn_sig(*this.tcx).abi(),
            ty::Closure(..) => Abi::RustCall,
            ty::Generator(..) => Abi::Rust,
            _ => span_bug!(this.cur_span(), "unexpected callee ty: {:?}", instance_ty),
        };

        // ABI check
        let check_abi = |callee_abi: Abi| -> InterpResult<'tcx> {
            let normalize_abi = |abi| match abi {
                Abi::Rust | Abi::RustCall | Abi::RustIntrinsic | Abi::PlatformIntrinsic =>
                // These are all the same ABI, really.
                {
                    Abi::Rust
                }
                abi => abi,
            };
            if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                throw_ub_format!(
                    "calling a function with ABI {} using caller ABI {}",
                    callee_abi.name(),
                    caller_abi.name()
                )
            }
            Ok(())
        };

        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                if M::enforce_abi(self) {
                    check_abi(get_abi(self, instance.ty(*self.tcx, self.param_env)))?;
                }
                assert!(caller_abi == Abi::RustIntrinsic || caller_abi == Abi::PlatformIntrinsic);
                M::call_intrinsic(self, instance, args, ret, unwind)
            }
            ty::InstanceDef::VtableShim(..)
            | ty::InstanceDef::ReifyShim(..)
            | ty::InstanceDef::ClosureOnceShim { .. }
            | ty::InstanceDef::FnPtrShim(..)
            | ty::InstanceDef::DropGlue(..)
            | ty::InstanceDef::CloneShim(..)
            | ty::InstanceDef::Item(_) => {
                // We need MIR for this fn
                let body =
                    match M::find_mir_or_eval_fn(self, instance, caller_abi, args, ret, unwind)? {
                        Some(body) => body,
                        None => return Ok(()),
                    };

                // Check against the ABI of the MIR body we are calling (not the ABI of `instance`;
                // these can differ when `find_mir_or_eval_fn` does something clever like resolve
                // exported symbol names).
                let callee_def_id = body.source.def_id();
                let callee_abi = get_abi(self, self.tcx.type_of(callee_def_id));

                if M::enforce_abi(self) {
                    check_abi(callee_abi)?;
                }

                if !matches!(unwind, StackPopUnwind::NotAllowed)
                    && !self
                        .fn_can_unwind(self.tcx.codegen_fn_attrs(callee_def_id).flags, callee_abi)
                {
                    // The callee cannot unwind.
                    unwind = StackPopUnwind::NotAllowed;
                }

                self.push_stack_frame(
                    instance,
                    body,
                    ret.map(|p| p.0),
                    StackPopCleanup::Goto { ret: ret.map(|p| p.1), unwind },
                )?;

                // If an error is raised here, pop the frame again to get an accurate backtrace.
                // To this end, we wrap it all in a `try` block.
                let res: InterpResult<'tcx> = try {
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>()
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        body.spread_arg,
                        body.args_iter()
                            .map(|local| (
                                local,
                                self.layout_of_local(self.frame(), local, None).unwrap().ty
                            ))
                            .collect::<Vec<_>>()
                    );

                    // Figure out how to pass which arguments.
                    // The Rust ABI is special: ZSTs get skipped.
                    let rust_abi = match caller_abi {
                        Abi::Rust | Abi::RustCall => true,
                        _ => false,
                    };
                    // We have two iterators: Where the arguments come from,
                    // and where they go to.

                    // For where they come from: If the ABI is RustCall, we untuple the
                    // last incoming argument. These two iterators do not have the same type,
                    // so to keep the code paths uniform we accept an allocation
                    // (for RustCall ABI only).
                    let caller_args: Cow<'_, [OpTy<'tcx, M::PointerTag>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple
                            let (untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(
                                args.iter()
                                    .map(|&a| Ok(a))
                                    .chain(
                                        (0..untuple_arg.layout.fields.count())
                                            .map(|i| self.operand_field(untuple_arg, i)),
                                    )
                                    .collect::<InterpResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>(
                                    )?,
                            )
                        } else {
                            // Plain arg passing
                            Cow::from(args)
                        };
                    // Skip ZSTs
                    let mut caller_iter =
                        caller_args.iter().filter(|op| !rust_abi || !op.layout.is_zst()).copied();

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`. If we could write
                    // this as a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_iter` for ZSTs.
                    for local in body.args_iter() {
                        let dest = self.eval_place(mir::Place::from(local))?;
                        if Some(local) == body.spread_arg {
                            // Must be a tuple
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(&dest, i)?;
                                self.pass_argument(rust_abi, &mut caller_iter, &dest)?;
                            }
                        } else {
                            // Normal argument
                            self.pass_argument(rust_abi, &mut caller_iter, &dest)?;
                        }
                    }
                    // Now we should have no more caller args
                    if caller_iter.next().is_some() {
                        throw_ub_format!("calling a function with more arguments than it expected")
                    }
                    // Don't forget to check the return type!
                    if let Some((caller_ret, _)) = ret {
                        let callee_ret = self.eval_place(mir::Place::return_place())?;
                        if !Self::check_argument_compat(
                            rust_abi,
                            caller_ret.layout,
                            callee_ret.layout,
                        ) {
                            throw_ub_format!(
                                "calling a function with return type {:?} passing \
                                 return place of type {:?}",
                                callee_ret.layout.ty,
                                caller_ret.layout.ty
                            )
                        }
                    } else {
                        let local = mir::RETURN_PLACE;
                        let callee_layout = self.layout_of_local(self.frame(), local, None)?;
                        if !callee_layout.abi.is_uninhabited() {
                            throw_ub_format!("calling a returning function without a return place")
                        }
                    }
                };
                match res {
                    Err(err) => {
                        self.stack_mut().pop();
                        Err(err)
                    }
                    Ok(()) => Ok(()),
                }
            }
            // cannot use the shim here, because that will only result in infinite recursion
            ty::InstanceDef::Virtual(_, idx) => {
                let mut args = args.to_vec();
                // We have to implement all "object safe receivers". Currently we
                // support built-in pointers `(&, &mut, Box)` as well as unsized-self. We do
                // not yet support custom self types.
                // Also see `compiler/rustc_codegen_llvm/src/abi.rs` and `compiler/rustc_codegen_ssa/src/mir/block.rs`.
                let receiver_place = match args[0].layout.ty.builtin_deref(true) {
                    Some(_) => {
                        // Built-in pointer.
                        self.deref_operand(&args[0])?
                    }
                    None => {
                        // Unsized self.
                        args[0].assert_mem_place(self)
                    }
                };
                // Find and consult vtable
                let vtable = receiver_place.vtable();
                let drop_fn = self.get_vtable_slot(vtable, u64::try_from(idx).unwrap())?;

                // `*mut receiver_place.layout.ty` is almost the layout that we
                // want for args[0]: We have to project to field 0 because we want
                // a thin pointer.
                assert!(receiver_place.layout.is_unsized());
                let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
                let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0)?;
                // Adjust receiver argument.
                args[0] =
                    OpTy::from(ImmTy::from_immediate(receiver_place.ptr.into(), this_receiver_ptr));
                trace!("Patched self operand to {:#?}", args[0]);
                // recurse with concrete function
                self.eval_fn_call(drop_fn, caller_abi, &args, ret, unwind)
            }
        }
    }

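    /// Evaluate the `Drop` terminator: force the place into memory, unpack trait objects to
    /// their concrete type, and call the drop-glue `instance` with the place's address like a
    /// regular `fn(*mut T)` returning `()`.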
    fn drop_in_place(
        &mut self,
        place: &PlaceTy<'tcx, M::PointerTag>,
        instance: ty::Instance<'tcx>,
        target: mir::BasicBlock,
        unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("drop_in_place: {:?},\n {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object. This may well be unaligned, which is fine
        // for us here. However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.kind() {
            ty::Dynamic(..) => {
                // Dropping a trait object.
                self.unpack_dyn_trait(&place)?
            }
            _ => (instance, place),
        };

        let arg = ImmTy::from_immediate(
            place.to_ref(),
            self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        );

        let ty = self.tcx.mk_unit(); // return type is ()
        let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);

        self.eval_fn_call(
            FnVal::Instance(instance),
            Abi::Rust,
            &[arg.into()],
            Some((&dest.into(), target)),
            match unwind {
                Some(cleanup) => StackPopUnwind::Cleanup(cleanup),
                None => StackPopUnwind::Skip,
            },
        )
    }
}