// src/librustc_mir/interpret/terminator.rs (upstream version 1.45.0)
use std::borrow::Cow;
use std::convert::TryFrom;

use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::ty::Instance;
use rustc_middle::{mir, ty};
use rustc_target::abi::{self, LayoutOf as _};
use rustc_target::spec::abi::Abi;

use super::{
    FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, StackPopCleanup,
};
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.pop_stack_frame(/* unwinding */ false)?
            }

            Goto { target } => self.go_to_block(target),

            SwitchInt { ref discr, ref values, ref targets, .. } => {
                let discr = self.read_immediate(self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

                // Branch to the `otherwise` case by default, if no match is found.
                assert!(!targets.is_empty());
                let mut target_block = targets[targets.len() - 1];
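                // E.g. source-level `match x { 0 => ..., 7 => ..., _ => ... }`
                // lowers to a `SwitchInt` with `values == [0, 7]` and
                // `targets == [bb1, bb2, bb3]`: one target per value plus the
                // trailing `otherwise` block (hence `targets.len() - 1` above).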

                for (index, &const_int) in values.iter().enumerate() {
                    // Compare using binary_op, to also support pointer values
                    let res = self
                        .overflowing_binary_op(
                            mir::BinOp::Eq,
                            discr,
                            ImmTy::from_uint(const_int, discr.layout),
                        )?
                        .0;
                    if res.to_bool()? {
                        target_block = targets[index];
                        break;
                    }
                }

                self.go_to_block(target_block);
            }

            Call { ref func, ref args, destination, ref cleanup, .. } => {
                let old_stack = self.frame_idx();
                let old_loc = self.frame().loc;
                let func = self.eval_operand(func, None)?;
                let (fn_val, abi) = match func.layout.ty.kind {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_scalar(func)?.not_undef()?;
                        let fn_val = self.memory.get_fn(fn_ptr)?;
                        (fn_val, caller_abi)
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (FnVal::Instance(self.resolve(def_id, substs)?), sig.abi())
                    }
                    _ => span_bug!(
                        terminator.source_info.span,
                        "invalid callee of type {:?}",
                        func.layout.ty
                    ),
                };
                let args = self.eval_operands(args)?;
                let ret = match destination {
                    Some((dest, ret)) => Some((self.eval_place(dest)?, ret)),
                    None => None,
                };
                self.eval_fn_call(fn_val, abi, &args[..], ret, *cleanup)?;
                // Sanity-check that `eval_fn_call` either pushed a new frame or
                // did a jump to another block.
                if self.frame_idx() == old_stack && self.frame().loc == old_loc {
                    span_bug!(terminator.source_info.span, "evaluating this call made no progress");
                }
            }

            Drop { location, target, unwind } => {
                let place = self.eval_place(location)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", location, ty);

                let instance = Instance::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(place, instance, target, unwind)?;
            }

            Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond_val =
                    self.read_immediate(self.eval_operand(cond, None)?)?.to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.go_to_block(target);
                } else {
                    M::assert_panic(self, msg, cleanup)?;
                }
            }

            Abort => {
                M::abort(self)?;
            }

            // When we encounter Resume, we've finished unwinding
            // cleanup for the current stack frame. We pop it in order
            // to continue unwinding the next frame.
            Resume => {
                trace!("unwinding: resuming from cleanup");
                // By definition, a Resume terminator means
                // that we're unwinding.
                self.pop_stack_frame(/* unwinding */ true)?;
                return Ok(());
            }

            // It is UB to ever encounter this.
            Unreachable => throw_ub!(Unreachable),

            // These should never occur for MIR we actually run.
            DropAndReplace { .. }
            | FalseEdges { .. }
            | FalseUnwind { .. }
            | Yield { .. }
            | GeneratorDrop => span_bug!(
                terminator.source_info.span,
                "{:#?} should have been eliminated by MIR pass",
                terminator.kind
            ),

            // Inline assembly can't be interpreted.
            InlineAsm { .. } => throw_unsup_format!("inline assembly is not supported"),
        }

        Ok(())
    }

    fn check_argument_compat(
        rust_abi: bool,
        caller: TyAndLayout<'tcx>,
        callee: TyAndLayout<'tcx>,
    ) -> bool {
        if caller.ty == callee.ty {
            // No question
            return true;
        }
        if !rust_abi {
            // Don't risk anything
            return false;
        }
        // Compare layout
        match (&caller.abi, &callee.abi) {
            // Different valid ranges are okay (once we enforce validity,
            // that will take care to make it UB to leave the range, just
            // like for transmute).
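            // E.g. `u32` and `char` are both `Scalar` values of primitive type
            // `Int(I32)`; they differ only in their valid range, so they count
            // as compatible here, and a bad bit pattern is the same kind of UB
            // as with `transmute`.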
            (abi::Abi::Scalar(ref caller), abi::Abi::Scalar(ref callee)) => {
                caller.value == callee.value
            }
            (
                abi::Abi::ScalarPair(ref caller1, ref caller2),
                abi::Abi::ScalarPair(ref callee1, ref callee2),
            ) => caller1.value == callee1.value && caller2.value == callee2.value,
            // Be conservative
            _ => false,
        }
    }

    /// Pass a single argument, checking the types for compatibility.
    fn pass_argument(
        &mut self,
        rust_abi: bool,
        caller_arg: &mut impl Iterator<Item = OpTy<'tcx, M::PointerTag>>,
        callee_arg: PlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx> {
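        // E.g. under the Rust ABI an argument of type `()` is a ZST: the
        // callee side is skipped just below, and the matching caller side was
        // already filtered out of the iterator in `eval_fn_call`.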
        if rust_abi && callee_arg.layout.is_zst() {
            // Nothing to do.
            trace!("Skipping callee ZST");
            return Ok(());
        }
        let caller_arg = caller_arg.next().ok_or_else(|| {
            err_ub_format!("calling a function with fewer arguments than it requires")
        })?;
        if rust_abi {
            assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out");
        }
        // Now, check the argument layouts for compatibility.
        if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) {
            throw_ub_format!(
                "calling a function with argument of type {:?} passing data of type {:?}",
                callee_arg.layout.ty,
                caller_arg.layout.ty
            )
        }
        // We allow some transmutes here
        self.copy_op_transmute(caller_arg, callee_arg)
    }

    /// Call this function -- pushing the stack frame and initializing the arguments.
    fn eval_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        ret: Option<(PlaceTy<'tcx, M::PointerTag>, mir::BasicBlock)>,
        unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("eval_fn_call: {:#?}", fn_val);

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(self, extra, args, ret, unwind);
            }
        };

        // ABI check
        {
            let callee_abi = {
                let instance_ty = instance.ty_env(*self.tcx, self.param_env);
                match instance_ty.kind {
                    ty::FnDef(..) => instance_ty.fn_sig(*self.tcx).abi(),
                    ty::Closure(..) => Abi::RustCall,
                    ty::Generator(..) => Abi::Rust,
                    _ => bug!("unexpected callee ty: {:?}", instance_ty),
                }
            };
            let normalize_abi = |abi| match abi {
                // These are all the same ABI, really.
                Abi::Rust | Abi::RustCall | Abi::RustIntrinsic | Abi::PlatformIntrinsic => {
                    Abi::Rust
                }
                abi => abi,
            };
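            // E.g. closures are invoked through `Fn::call`, which is declared
            // `extern "rust-call"`, and intrinsics have `extern "rust-intrinsic"`
            // signatures; as far as the interpreter is concerned these all use
            // the Rust calling convention, so they are unified before comparing.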
            if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                throw_ub_format!(
                    "calling a function with ABI {:?} using caller ABI {:?}",
                    callee_abi,
                    caller_abi
                )
            }
        }

        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                assert!(caller_abi == Abi::RustIntrinsic || caller_abi == Abi::PlatformIntrinsic);
                M::call_intrinsic(self, instance, args, ret, unwind)
            }
            ty::InstanceDef::VtableShim(..)
            | ty::InstanceDef::ReifyShim(..)
            | ty::InstanceDef::ClosureOnceShim { .. }
            | ty::InstanceDef::FnPtrShim(..)
            | ty::InstanceDef::DropGlue(..)
            | ty::InstanceDef::CloneShim(..)
            | ty::InstanceDef::Item(_) => {
                // We need MIR for this fn
                let body = match M::find_mir_or_eval_fn(self, instance, args, ret, unwind)? {
                    Some(body) => body,
                    None => return Ok(()),
                };
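                // (`None` means the machine handled the call entirely by
                // itself, e.g. the way Miri implements foreign functions, so
                // there is nothing left for us to do here.)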

                self.push_stack_frame(
                    instance,
                    body,
                    ret.map(|p| p.0),
                    StackPopCleanup::Goto { ret: ret.map(|p| p.1), unwind },
                )?;

                // If an error is raised here, pop the frame again to get an accurate backtrace.
                // To this end, we wrap it all in a `try` block.
                let res: InterpResult<'tcx> = try {
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>()
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        body.spread_arg,
                        body.args_iter()
                            .map(|local| (
                                local,
                                self.layout_of_local(self.frame(), local, None).unwrap().ty
                            ))
                            .collect::<Vec<_>>()
                    );

                    // Figure out how to pass which arguments.
                    // The Rust ABI is special: ZSTs get skipped.
                    let rust_abi = match caller_abi {
                        Abi::Rust | Abi::RustCall => true,
                        _ => false,
                    };
                    // We have two iterators: where the arguments come from,
                    // and where they go to.

                    // For where they come from: if the ABI is RustCall, we untuple the
                    // last incoming argument. The tupled and untupled cases yield
                    // iterators of different types, so to keep the code paths uniform
                    // we accept an allocation (for the RustCall ABI only).
                    let caller_args: Cow<'_, [OpTy<'tcx, M::PointerTag>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple
                            let (&untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(
                                args.iter()
                                    .map(|&a| Ok(a))
                                    .chain(
                                        (0..untuple_arg.layout.fields.count())
                                            .map(|i| self.operand_field(untuple_arg, i)),
                                    )
                                    .collect::<InterpResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>()?,
                            )
                        } else {
                            // Plain arg passing
                            Cow::from(args)
                        };
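                    // E.g. a `RustCall` invocation `f(x, (a, b))` arrives here
                    // with `args == [x, (a, b)]` and becomes the argument
                    // stream `x, a, b` after untupling.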
                    // Skip ZSTs
                    let mut caller_iter =
                        caller_args.iter().filter(|op| !rust_abi || !op.layout.is_zst()).copied();

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`. If we could write
                    // this as a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_iter` for ZSTs.
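                    // E.g. a source-level `extern "rust-call" fn call_once(self, args: (A, B))`
                    // has `spread_arg` pointing at `args`: the tuple's fields
                    // are filled one by one from the (already untupled) caller
                    // stream produced above.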
                    for local in body.args_iter() {
                        let dest = self.eval_place(mir::Place::from(local))?;
                        if Some(local) == body.spread_arg {
                            // Must be a tuple
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(dest, i)?;
                                self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                            }
                        } else {
                            // Normal argument
                            self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                        }
                    }
                    // Now we should have no more caller args
                    if caller_iter.next().is_some() {
                        throw_ub_format!("calling a function with more arguments than it expected")
                    }
                    // Don't forget to check the return type!
                    if let Some((caller_ret, _)) = ret {
                        let callee_ret = self.eval_place(mir::Place::return_place())?;
                        if !Self::check_argument_compat(
                            rust_abi,
                            caller_ret.layout,
                            callee_ret.layout,
                        ) {
                            throw_ub_format!(
                                "calling a function with return type {:?} passing \
                                 return place of type {:?}",
                                callee_ret.layout.ty,
                                caller_ret.layout.ty
                            )
                        }
                    } else {
                        let local = mir::RETURN_PLACE;
                        let callee_layout = self.layout_of_local(self.frame(), local, None)?;
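                        // A call without a return place is only valid for
                        // callees that cannot return at all, e.g. `fn f() -> !`,
                        // whose return type is uninhabited.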
                        if !callee_layout.abi.is_uninhabited() {
                            throw_ub_format!("calling a returning function without a return place")
                        }
                    }
                };
                match res {
                    Err(err) => {
                        self.stack_mut().pop();
                        Err(err)
                    }
                    Ok(()) => Ok(()),
                }
            }
            // Cannot use the shim here, because that would only result in infinite recursion.
            ty::InstanceDef::Virtual(_, idx) => {
                let mut args = args.to_vec();
                // We have to implement all "object safe receivers". Currently we
                // support built-in pointers (&, &mut, Box) as well as unsized-self. We do
                // not yet support custom self types.
                // Also see librustc_codegen_llvm/abi.rs and librustc_codegen_llvm/mir/block.rs.
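                // E.g. `receiver.method()` on a `&dyn Trait` receiver reaches
                // here as `InstanceDef::Virtual(def_id, idx)`, where `idx` is
                // the method's index among the trait's methods;
                // `get_vtable_slot` below maps it to the vtable entry holding
                // the concrete function.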
                let receiver_place = match args[0].layout.ty.builtin_deref(true) {
                    Some(_) => {
                        // Built-in pointer.
                        self.deref_operand(args[0])?
                    }
                    None => {
                        // Unsized self.
                        args[0].assert_mem_place(self)
                    }
                };
                // Find and consult vtable
                let vtable = receiver_place.vtable();
                let fn_val = self.get_vtable_slot(vtable, u64::try_from(idx).unwrap())?;

                // `*mut receiver_place.layout.ty` is almost the layout that we
                // want for args[0]: We have to project to field 0 because we want
                // a thin pointer.
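                // E.g. for a `&dyn Trait` receiver, `receiver_ptr_ty` below is
                // `*mut dyn Trait`, a fat pointer; its field 0 is the thin data
                // pointer, which is all the (now concrete) callee expects.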
                assert!(receiver_place.layout.is_unsized());
                let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
                let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0)?;
                // Adjust receiver argument.
                args[0] =
                    OpTy::from(ImmTy::from_immediate(receiver_place.ptr.into(), this_receiver_ptr));
                trace!("Patched self operand to {:#?}", args[0]);
                // Recurse with the concrete function.
                self.eval_fn_call(fn_val, caller_abi, &args, ret, unwind)
            }
        }
    }

    fn drop_in_place(
        &mut self,
        place: PlaceTy<'tcx, M::PointerTag>,
        instance: ty::Instance<'tcx>,
        target: mir::BasicBlock,
        unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("drop_in_place: {:?},\n {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object. This may well be unaligned, which is fine
        // for us here. However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.kind {
            ty::Dynamic(..) => {
                // Dropping a trait object.
                self.unpack_dyn_trait(place)?
            }
            _ => (instance, place),
        };
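        // E.g. when dropping a `dyn Any` trait object, `unpack_dyn_trait`
        // consults the vtable to swap `instance` for the concrete type's drop
        // glue and to turn the fat place into a thin place with the concrete
        // layout.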

        let arg = ImmTy::from_immediate(
            place.to_ref(),
            self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        );

        let ty = self.tcx.mk_unit(); // return type is ()
        let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);

        self.eval_fn_call(
            FnVal::Instance(instance),
            Abi::Rust,
            &[arg.into()],
            Some((dest.into(), target)),
            unwind,
        )
    }
}