use std::borrow::Cow;
use std::convert::TryFrom;

use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::ty::layout::{self, LayoutOf as _, TyAndLayout};
use rustc_middle::ty::Instance;
use rustc_middle::{
    mir,
    ty::{self, Ty},
};
use rustc_target::abi;
use rustc_target::spec::abi::Abi;

use super::{
    FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, Scalar,
    StackPopCleanup, StackPopUnwind,
};

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
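    /// Returns whether a function with the given codegen attributes and ABI is allowed to
    /// unwind; a thin wrapper around `layout::fn_can_unwind`.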
    fn fn_can_unwind(&self, attrs: CodegenFnAttrFlags, abi: Abi) -> bool {
        layout::fn_can_unwind(*self.tcx, attrs, abi)
    }

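    /// Evaluate a single MIR terminator: jump to the next block, evaluate a call or drop,
    /// or pop the current stack frame on `Return`/`Resume`.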
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.pop_stack_frame(/* unwinding */ false)?
            }

            Goto { target } => self.go_to_block(target),

            SwitchInt { ref discr, ref targets, switch_ty } => {
                let discr = self.read_immediate(&self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);
                assert_eq!(discr.layout.ty, switch_ty);

                // Branch to the `otherwise` case by default, if no match is found.
                assert!(!targets.iter().is_empty());
                let mut target_block = targets.otherwise();

                for (const_int, target) in targets.iter() {
                    // Compare using binary_op, to also support pointer values
                    let res = self
                        .overflowing_binary_op(
                            mir::BinOp::Eq,
                            &discr,
                            &ImmTy::from_uint(const_int, discr.layout),
                        )?
                        .0;
                    if res.to_bool()? {
                        target_block = target;
                        break;
                    }
                }

                self.go_to_block(target_block);
            }

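            // Function calls: evaluate the callee and its arguments, then hand over to
            // `eval_fn_call`, which sets up the callee (usually by pushing a new stack frame).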
            Call { ref func, ref args, destination, ref cleanup, from_hir_call: _, fn_span: _ } => {
                let old_stack = self.frame_idx();
                let old_loc = self.frame().loc;
                let func = self.eval_operand(func, None)?;
                let (fn_val, abi, caller_can_unwind) = match *func.layout.ty.kind() {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_pointer(&func)?;
                        let fn_val = self.memory.get_fn(fn_ptr)?;
                        (
                            fn_val,
                            caller_abi,
                            self.fn_can_unwind(CodegenFnAttrFlags::empty(), caller_abi),
                        )
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (
                            FnVal::Instance(
                                self.resolve(ty::WithOptConstParam::unknown(def_id), substs)?,
                            ),
                            sig.abi(),
                            self.fn_can_unwind(self.tcx.codegen_fn_attrs(def_id).flags, sig.abi()),
                        )
                    }
                    _ => span_bug!(
                        terminator.source_info.span,
                        "invalid callee of type {:?}",
                        func.layout.ty
                    ),
                };
                let args = self.eval_operands(args)?;
                let dest_place;
                let ret = match destination {
                    Some((dest, ret)) => {
                        dest_place = self.eval_place(dest)?;
                        Some((&dest_place, ret))
                    }
                    None => None,
                };
                self.eval_fn_call(
                    fn_val,
                    abi,
                    &args[..],
                    ret,
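                    // Decide how unwinding past this call is handled: jump to the cleanup
                    // block if one exists, skip cleanup otherwise, and forbid unwinding
                    // entirely if the caller's ABI cannot unwind.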
                    match (cleanup, caller_can_unwind) {
                        (Some(cleanup), true) => StackPopUnwind::Cleanup(*cleanup),
                        (None, true) => StackPopUnwind::Skip,
                        (_, false) => StackPopUnwind::NotAllowed,
                    },
                )?;
                // Sanity-check that `eval_fn_call` either pushed a new frame or
                // did a jump to another block.
                if self.frame_idx() == old_stack && self.frame().loc == old_loc {
                    span_bug!(terminator.source_info.span, "evaluating this call made no progress");
                }
            }

            Drop { place, target, unwind } => {
                let place = self.eval_place(place)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", place, ty);

                let instance = Instance::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(&place, instance, target, unwind)?;
            }

            Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond_val =
                    self.read_immediate(&self.eval_operand(cond, None)?)?.to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.go_to_block(target);
                } else {
                    M::assert_panic(self, msg, cleanup)?;
                }
            }

            Abort => {
                M::abort(self, "the program aborted execution".to_owned())?;
            }

            // When we encounter Resume, we've finished unwinding
            // cleanup for the current stack frame. We pop it in order
            // to continue unwinding the next frame
            Resume => {
                trace!("unwinding: resuming from cleanup");
                // By definition, a Resume terminator means
                // that we're unwinding
                self.pop_stack_frame(/* unwinding */ true)?;
                return Ok(());
            }

            // It is UB to ever encounter this.
            Unreachable => throw_ub!(Unreachable),

            // These should never occur for MIR we actually run.
            DropAndReplace { .. }
            | FalseEdge { .. }
            | FalseUnwind { .. }
            | Yield { .. }
            | GeneratorDrop => span_bug!(
                terminator.source_info.span,
                "{:#?} should have been eliminated by MIR pass",
                terminator.kind
            ),

            // Inline assembly can't be interpreted.
            InlineAsm { .. } => throw_unsup_format!("inline assembly is not supported"),
        }

        Ok(())
    }

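    /// Returns whether an argument of layout `caller` may be passed to a callee expecting
    /// layout `callee`: identical types always match; under the Rust ABI, scalar and
    /// scalar-pair layouts with the same value kinds are also accepted.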
    fn check_argument_compat(
        rust_abi: bool,
        caller: TyAndLayout<'tcx>,
        callee: TyAndLayout<'tcx>,
    ) -> bool {
        if caller.ty == callee.ty {
            // No question
            return true;
        }
        if !rust_abi {
            // Don't risk anything
            return false;
        }
        // Compare layout
        match (caller.abi, callee.abi) {
            // Different valid ranges are okay (once we enforce validity,
            // that will take care to make it UB to leave the range, just
            // like for transmute).
            (abi::Abi::Scalar(caller), abi::Abi::Scalar(callee)) => caller.value == callee.value,
            (abi::Abi::ScalarPair(caller1, caller2), abi::Abi::ScalarPair(callee1, callee2)) => {
                caller1.value == callee1.value && caller2.value == callee2.value
            }
            // Be conservative
            _ => false,
        }
    }

    /// Pass a single argument, checking the types for compatibility.
    fn pass_argument(
        &mut self,
        rust_abi: bool,
        caller_arg: &mut impl Iterator<Item = OpTy<'tcx, M::PointerTag>>,
        callee_arg: &PlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx> {
        if rust_abi && callee_arg.layout.is_zst() {
            // Nothing to do.
            trace!("Skipping callee ZST");
            return Ok(());
        }
        let caller_arg = caller_arg.next().ok_or_else(|| {
            err_ub_format!("calling a function with fewer arguments than it requires")
        })?;
        if rust_abi {
            assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out");
        }
        // Now, check
        if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) {
            throw_ub_format!(
                "calling a function with argument of type {:?} passing data of type {:?}",
                callee_arg.layout.ty,
                caller_arg.layout.ty
            )
        }
        // We allow some transmutes here
        self.copy_op_transmute(&caller_arg, callee_arg)
    }

    /// Call this function -- pushing the stack frame and initializing the arguments.
    pub(crate) fn eval_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        ret: Option<(&PlaceTy<'tcx, M::PointerTag>, mir::BasicBlock)>,
        mut unwind: StackPopUnwind,
    ) -> InterpResult<'tcx> {
        trace!("eval_fn_call: {:#?}", fn_val);

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(self, extra, caller_abi, args, ret, unwind);
            }
        };

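        // Determine the ABI a callee of the given type is declared with: function
        // definitions carry an explicit signature, closures use `RustCall`, and
        // generators use the plain Rust ABI.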
        let get_abi = |this: &Self, instance_ty: Ty<'tcx>| match instance_ty.kind() {
            ty::FnDef(..) => instance_ty.fn_sig(*this.tcx).abi(),
            ty::Closure(..) => Abi::RustCall,
            ty::Generator(..) => Abi::Rust,
            _ => span_bug!(this.cur_span(), "unexpected callee ty: {:?}", instance_ty),
        };

        // ABI check
        let check_abi = |callee_abi: Abi| -> InterpResult<'tcx> {
            let normalize_abi = |abi| match abi {
                Abi::Rust | Abi::RustCall | Abi::RustIntrinsic | Abi::PlatformIntrinsic =>
                // These are all the same ABI, really.
                {
                    Abi::Rust
                }
                abi => abi,
            };
            if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                throw_ub_format!(
                    "calling a function with ABI {} using caller ABI {}",
                    callee_abi.name(),
                    caller_abi.name()
                )
            }
            Ok(())
        };

        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                if M::enforce_abi(self) {
                    check_abi(get_abi(self, instance.ty(*self.tcx, self.param_env)))?;
                }
                assert!(caller_abi == Abi::RustIntrinsic || caller_abi == Abi::PlatformIntrinsic);
                M::call_intrinsic(self, instance, args, ret, unwind)
            }
            ty::InstanceDef::VtableShim(..)
            | ty::InstanceDef::ReifyShim(..)
            | ty::InstanceDef::ClosureOnceShim { .. }
            | ty::InstanceDef::FnPtrShim(..)
            | ty::InstanceDef::DropGlue(..)
            | ty::InstanceDef::CloneShim(..)
            | ty::InstanceDef::Item(_) => {
                // We need MIR for this fn
                let body =
                    match M::find_mir_or_eval_fn(self, instance, caller_abi, args, ret, unwind)? {
                        Some(body) => body,
                        None => return Ok(()),
                    };

                // Check against the ABI of the MIR body we are calling (not the ABI of `instance`;
                // these can differ when `find_mir_or_eval_fn` does something clever like resolve
                // exported symbol names).
                let callee_def_id = body.source.def_id();
                let callee_abi = get_abi(self, self.tcx.type_of(callee_def_id));

                if M::enforce_abi(self) {
                    check_abi(callee_abi)?;
                }

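                // If the callee is known not to unwind, remember that, so that an unwind
                // reaching this frame is flagged as an error.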
                if !matches!(unwind, StackPopUnwind::NotAllowed)
                    && !self
                        .fn_can_unwind(self.tcx.codegen_fn_attrs(callee_def_id).flags, callee_abi)
                {
                    // The callee cannot unwind.
                    unwind = StackPopUnwind::NotAllowed;
                }

                self.push_stack_frame(
                    instance,
                    body,
                    ret.map(|p| p.0),
                    StackPopCleanup::Goto { ret: ret.map(|p| p.1), unwind },
                )?;

                // If an error is raised here, pop the frame again to get an accurate backtrace.
                // To this end, we wrap it all in a `try` block.
                let res: InterpResult<'tcx> = try {
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>()
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        body.spread_arg,
                        body.args_iter()
                            .map(|local| (
                                local,
                                self.layout_of_local(self.frame(), local, None).unwrap().ty
                            ))
                            .collect::<Vec<_>>()
                    );

                    // Figure out how to pass which arguments.
                    // The Rust ABI is special: ZSTs get skipped.
                    let rust_abi = matches!(caller_abi, Abi::Rust | Abi::RustCall);

                    // We have two iterators: Where the arguments come from,
                    // and where they go to.

                    // For where they come from: If the ABI is RustCall, we untuple the
                    // last incoming argument. These two iterators do not have the same type,
                    // so to keep the code paths uniform we accept an allocation
                    // (for RustCall ABI only).
                    let caller_args: Cow<'_, [OpTy<'tcx, M::PointerTag>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple
                            let (untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(
                                args.iter()
                                    .map(|&a| Ok(a))
                                    .chain(
                                        (0..untuple_arg.layout.fields.count())
                                            .map(|i| self.operand_field(untuple_arg, i)),
                                    )
                                    .collect::<InterpResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>(
                                    )?,
                            )
                        } else {
                            // Plain arg passing
                            Cow::from(args)
                        };
                    // Skip ZSTs
                    let mut caller_iter =
                        caller_args.iter().filter(|op| !rust_abi || !op.layout.is_zst()).copied();

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`. If we could write
                    // this as a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_iter` for ZSTs.
                    for local in body.args_iter() {
                        let dest = self.eval_place(mir::Place::from(local))?;
                        if Some(local) == body.spread_arg {
                            // Must be a tuple
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(&dest, i)?;
                                self.pass_argument(rust_abi, &mut caller_iter, &dest)?;
                            }
                        } else {
                            // Normal argument
                            self.pass_argument(rust_abi, &mut caller_iter, &dest)?;
                        }
                    }
                    // Now we should have no more caller args
                    if caller_iter.next().is_some() {
                        throw_ub_format!("calling a function with more arguments than it expected")
                    }
                    // Don't forget to check the return type!
                    if let Some((caller_ret, _)) = ret {
                        let callee_ret = self.eval_place(mir::Place::return_place())?;
                        if !Self::check_argument_compat(
                            rust_abi,
                            caller_ret.layout,
                            callee_ret.layout,
                        ) {
                            throw_ub_format!(
                                "calling a function with return type {:?} passing \
                                 return place of type {:?}",
                                callee_ret.layout.ty,
                                caller_ret.layout.ty
                            )
                        }
                    } else {
                        let local = mir::RETURN_PLACE;
                        let callee_layout = self.layout_of_local(self.frame(), local, None)?;
                        if !callee_layout.abi.is_uninhabited() {
                            throw_ub_format!("calling a returning function without a return place")
                        }
                    }
                };
                match res {
                    Err(err) => {
                        self.stack_mut().pop();
                        Err(err)
                    }
                    Ok(()) => Ok(()),
                }
            }
            // cannot use the shim here, because that will only result in infinite recursion
            ty::InstanceDef::Virtual(_, idx) => {
                let mut args = args.to_vec();
                // We have to implement all "object safe receivers". Currently we
                // support built-in pointers `(&, &mut, Box)` as well as unsized-self. We do
                // not yet support custom self types.
                // Also see `compiler/rustc_codegen_llvm/src/abi.rs` and `compiler/rustc_codegen_ssa/src/mir/block.rs`.
                let receiver_place = match args[0].layout.ty.builtin_deref(true) {
                    Some(_) => {
                        // Built-in pointer.
                        self.deref_operand(&args[0])?
                    }
                    None => {
                        // Unsized self.
                        args[0].assert_mem_place()
                    }
                };
                // Find and consult vtable
                let vtable = self.scalar_to_ptr(receiver_place.vtable());
                let fn_val = self.get_vtable_slot(vtable, u64::try_from(idx).unwrap())?;

                // `*mut receiver_place.layout.ty` is almost the layout that we
                // want for args[0]: We have to project to field 0 because we want
                // a thin pointer.
                assert!(receiver_place.layout.is_unsized());
                let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
                let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0);
                // Adjust receiver argument.
                args[0] = OpTy::from(ImmTy::from_immediate(
                    Scalar::from_maybe_pointer(receiver_place.ptr, self).into(),
                    this_receiver_ptr,
                ));
                trace!("Patched self operand to {:#?}", args[0]);
                // recurse with concrete function
                self.eval_fn_call(fn_val, caller_abi, &args, ret, unwind)
            }
        }
    }

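    /// Drop the given place by calling its drop glue (`instance`), then continue at
    /// `target` (or at `unwind` if dropping panics).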
    fn drop_in_place(
        &mut self,
        place: &PlaceTy<'tcx, M::PointerTag>,
        instance: ty::Instance<'tcx>,
        target: mir::BasicBlock,
        unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("drop_in_place: {:?},\n {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object. This may well be unaligned, which is fine
        // for us here. However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.kind() {
            ty::Dynamic(..) => {
                // Dropping a trait object.
                self.unpack_dyn_trait(&place)?
            }
            _ => (instance, place),
        };

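        // The drop glue takes a single argument: a (possibly wide) pointer to the place
        // being dropped. Its return type is `()`, so a dangling zero-sized place serves
        // as the return destination.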
        let arg = ImmTy::from_immediate(
            place.to_ref(self),
            self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        );

        let ty = self.tcx.mk_unit(); // return type is ()
        let dest = MPlaceTy::dangling(self.layout_of(ty)?);

        self.eval_fn_call(
            FnVal::Instance(instance),
            Abi::Rust,
            &[arg.into()],
            Some((&dest.into(), target)),
            match unwind {
                Some(cleanup) => StackPopUnwind::Cleanup(cleanup),
                None => StackPopUnwind::Skip,
            },
        )
    }
}