// src/librustc_mir/interpret/terminator.rs (rustc 1.40.0)
use std::borrow::Cow;

use rustc::{mir, ty};
use rustc::ty::Instance;
use rustc::ty::layout::{self, TyLayout, LayoutOf};
use syntax::source_map::Span;
use rustc_target::spec::abi::Abi;

use super::{
    InterpResult, PointerArithmetic,
    InterpCx, Machine, OpTy, ImmTy, PlaceTy, MPlaceTy, StackPopCleanup, FnVal,
};

impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
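    /// Jumps to the given basic block. A `target` of `None` means the
    /// terminator has no successor; actually reaching it is undefined behavior.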
    #[inline]
    pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> {
        if let Some(target) = target {
            self.frame_mut().block = target;
            self.frame_mut().stmt = 0;
            Ok(())
        } else {
            throw_ub!(Unreachable)
        }
    }

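    /// Evaluates the given MIR terminator: transfers control to the next
    /// block, pushes or pops stack frames for calls and returns, or raises
    /// the appropriate error.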
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> InterpResult<'tcx> {
        use rustc::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.frame().return_place.map(|r| self.dump_place(*r));
                self.pop_stack_frame()?
            }

            Goto { target } => self.goto_block(Some(target))?,

            SwitchInt {
                ref discr,
                ref values,
                ref targets,
                ..
            } => {
                let discr = self.read_immediate(self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

                // Branch to the `otherwise` case by default, if no match is found.
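                // (`targets` always has one more entry than `values`; the final
                // entry is the `otherwise` block.)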
                let mut target_block = targets[targets.len() - 1];

                for (index, &const_int) in values.iter().enumerate() {
                    // Compare using `binary_op`, to also support pointer values.
                    let res = self.overflowing_binary_op(
                        mir::BinOp::Eq,
                        discr,
                        ImmTy::from_uint(const_int, discr.layout),
                    )?.0;
                    if res.to_bool()? {
                        target_block = targets[index];
                        break;
                    }
                }

                self.goto_block(Some(target_block))?;
            }

            Call {
                ref func,
                ref args,
                ref destination,
                ..
            } => {
                let (dest, ret) = match *destination {
                    Some((ref lv, target)) => (Some(self.eval_place(lv)?), Some(target)),
                    None => (None, None),
                };

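                // The callee is either a function pointer (we load the pointer
                // value and look up the function it refers to) or a statically
                // known `FnDef` (we resolve it to a concrete instance).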
                let func = self.eval_operand(func, None)?;
                let (fn_val, abi) = match func.layout.ty.kind {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_scalar(func)?.not_undef()?;
                        let fn_val = self.memory.get_fn(fn_ptr)?;
                        (fn_val, caller_abi)
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (FnVal::Instance(self.resolve(def_id, substs)?), sig.abi())
                    },
                    _ => {
                        bug!("invalid callee of type {:?}", func.layout.ty)
                    }
                };
                let args = self.eval_operands(args)?;
                self.eval_fn_call(
                    fn_val,
                    terminator.source_info.span,
                    abi,
                    &args[..],
                    dest,
                    ret,
                )?;
            }

            Drop {
                ref location,
                target,
                ..
            } => {
                // FIXME(CTFE): forbid drop in const eval
                let place = self.eval_place(location)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", location, ty);

                let instance = Instance::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(
                    place,
                    instance,
                    terminator.source_info.span,
                    target,
                )?;
            }

            Assert {
                ref cond,
                expected,
                ref msg,
                target,
                ..
            } => {
                let cond_val = self.read_immediate(self.eval_operand(cond, None)?)?
                    .to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.goto_block(Some(target))?;
                } else {
                    // Compute the error message.
                    use rustc::mir::interpret::PanicInfo::*;
                    return Err(match msg {
                        BoundsCheck { ref len, ref index } => {
                            let len = self
                                .read_immediate(self.eval_operand(len, None)?)
                                .expect("can't eval len")
                                .to_scalar()?
                                .to_bits(self.memory.pointer_size())? as u64;
                            let index = self
                                .read_immediate(self.eval_operand(index, None)?)
                                .expect("can't eval index")
                                .to_scalar()?
                                .to_bits(self.memory.pointer_size())? as u64;
                            err_panic!(BoundsCheck { len, index })
                        }
                        Overflow(op) => err_panic!(Overflow(*op)),
                        OverflowNeg => err_panic!(OverflowNeg),
                        DivisionByZero => err_panic!(DivisionByZero),
                        RemainderByZero => err_panic!(RemainderByZero),
                        GeneratorResumedAfterReturn => err_panic!(GeneratorResumedAfterReturn),
                        GeneratorResumedAfterPanic => err_panic!(GeneratorResumedAfterPanic),
                        Panic { .. } => bug!("`Panic` variant cannot occur in MIR"),
                    }
                    .into());
                }
            }

            Yield { .. } |
            GeneratorDrop |
            DropAndReplace { .. } |
            Resume |
            Abort => unimplemented!("{:#?}", terminator.kind),
            FalseEdges { .. } =>
                bug!("should have been eliminated by the `simplify_branches` MIR pass"),
            FalseUnwind { .. } =>
                bug!("should have been eliminated by the `simplify_branches` MIR pass"),
            Unreachable => throw_ub!(Unreachable),
        }

        Ok(())
    }

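    /// Checks whether an argument of layout `caller` may be passed where the
    /// callee expects layout `callee`. Identical types are always compatible;
    /// beyond that, only layout-level compatibility is accepted, and only for
    /// the Rust ABI. For example (a consequence of this check, not a stability
    /// guarantee): under the Rust ABI, a caller passing `&i32` where the
    /// callee expects `*const i32` is accepted, since both are plain
    /// pointer-sized scalars.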
    fn check_argument_compat(
        rust_abi: bool,
        caller: TyLayout<'tcx>,
        callee: TyLayout<'tcx>,
    ) -> bool {
        if caller.ty == callee.ty {
            // No question.
            return true;
        }
        if !rust_abi {
            // Don't risk anything.
            return false;
        }
        // Compare the layouts.
        match (&caller.abi, &callee.abi) {
            // Different valid ranges are okay (once we enforce validity,
            // that will take care to make it UB to leave the range, just
            // like for transmute).
            (layout::Abi::Scalar(ref caller), layout::Abi::Scalar(ref callee)) =>
                caller.value == callee.value,
            (layout::Abi::ScalarPair(ref caller1, ref caller2),
             layout::Abi::ScalarPair(ref callee1, ref callee2)) =>
                caller1.value == callee1.value && caller2.value == callee2.value,
            // Be conservative.
            _ => false
        }
    }

    /// Pass a single argument, checking the types for compatibility.
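    /// Under the Rust ABI, zero-sized arguments are skipped entirely: the
    /// caller iterator is only advanced for non-ZST arguments.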
    fn pass_argument(
        &mut self,
        rust_abi: bool,
        caller_arg: &mut impl Iterator<Item = OpTy<'tcx, M::PointerTag>>,
        callee_arg: PlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx> {
        if rust_abi && callee_arg.layout.is_zst() {
            // Nothing to do.
            trace!("Skipping callee ZST");
            return Ok(());
        }
        let caller_arg = caller_arg.next()
            .ok_or_else(|| err_unsup!(FunctionArgCountMismatch))?;
        if rust_abi {
            debug_assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out");
        }
        // Now, check the types.
        if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) {
            throw_unsup!(FunctionArgMismatch(caller_arg.layout.ty, callee_arg.layout.ty))
        }
        // We allow some transmutes here.
        self.copy_op_transmute(caller_arg, callee_arg)
    }

    /// Call this function -- pushing the stack frame and initializing the arguments.
    fn eval_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        span: Span,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        dest: Option<PlaceTy<'tcx, M::PointerTag>>,
        ret: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("eval_fn_call: {:#?}", fn_val);

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(self, extra, args, dest, ret);
            }
        };

        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                // The intrinsic itself cannot diverge, so if we got here without a
                // return place, the call site must be unreachable (this can happen
                // e.g. for `transmute` returning `!`).
                let dest = match dest {
                    Some(dest) => dest,
                    None => throw_ub!(Unreachable)
                };
                M::call_intrinsic(self, span, instance, args, dest)?;
                // No stack frame gets pushed, the main loop will just act as if the
                // call completed.
                self.goto_block(ret)?;
                self.dump_place(*dest);
                Ok(())
            }
            ty::InstanceDef::VtableShim(..) |
            ty::InstanceDef::ReifyShim(..) |
            ty::InstanceDef::ClosureOnceShim { .. } |
            ty::InstanceDef::FnPtrShim(..) |
            ty::InstanceDef::DropGlue(..) |
            ty::InstanceDef::CloneShim(..) |
            ty::InstanceDef::Item(_) => {
                // ABI check
                {
                    let callee_abi = {
                        let instance_ty = instance.ty(*self.tcx);
                        match instance_ty.kind {
                            ty::FnDef(..) =>
                                instance_ty.fn_sig(*self.tcx).abi(),
                            ty::Closure(..) => Abi::RustCall,
                            ty::Generator(..) => Abi::Rust,
                            _ => bug!("unexpected callee ty: {:?}", instance_ty),
                        }
                    };
                    let normalize_abi = |abi| match abi {
                        Abi::Rust | Abi::RustCall | Abi::RustIntrinsic | Abi::PlatformIntrinsic =>
                            // These are all the same ABI, really.
                            Abi::Rust,
                        abi => abi,
                    };
                    if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                        throw_unsup!(FunctionAbiMismatch(caller_abi, callee_abi))
                    }
                }

                // We need MIR for this fn.
                let body = match M::find_fn(self, instance, args, dest, ret)? {
                    Some(body) => body,
                    None => return Ok(()),
                };

                self.push_stack_frame(
                    instance,
                    span,
                    body,
                    dest,
                    StackPopCleanup::Goto(ret),
                )?;

                // We want to pop this frame again in case there was an error, to put
                // the blame in the right location. Until the 2018 edition is used in
                // the compiler, we have to do this with an immediately invoked closure.
                let res = (|| {
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>()
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        body.spread_arg,
                        body.args_iter()
                            .map(|local|
                                (local, self.layout_of_local(self.frame(), local, None).unwrap().ty)
                            )
                            .collect::<Vec<_>>()
                    );

                    // Figure out how to pass which arguments.
                    // The Rust ABI is special: ZSTs get skipped.
                    let rust_abi = match caller_abi {
                        Abi::Rust | Abi::RustCall => true,
                        _ => false
                    };
                    // We have two iterators: where the arguments come from,
                    // and where they go to.

                    // For where they come from: if the ABI is RustCall, we untuple the
                    // last incoming argument. These two iterators do not have the same type,
                    // so to keep the code paths uniform we accept an allocation
                    // (for the RustCall ABI only).
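                    // For example, a closure call `f(a, b)` reaches us as the
                    // argument list `[closure_env, (a, b)]`; untupling turns it
                    // into `[closure_env, a, b]`.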
                    let caller_args: Cow<'_, [OpTy<'tcx, M::PointerTag>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple
                            let (&untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(args.iter().map(|&a| Ok(a))
                                .chain((0..untuple_arg.layout.fields.count()).into_iter()
                                    .map(|i| self.operand_field(untuple_arg, i as u64))
                                )
                                .collect::<InterpResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>()?)
                        } else {
                            // Plain argument passing.
                            Cow::from(args)
                        };
                    // Skip ZSTs.
                    let mut caller_iter = caller_args.iter()
                        .filter(|op| !rust_abi || !op.layout.is_zst())
                        .map(|op| *op);

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`. If we could write
                    // this as a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_iter` for ZSTs.
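                    // (`spread_arg` is the mirror image of the untupling above:
                    // a callee such as `extern "rust-call" fn(self, args: (A, B))`
                    // declares its last argument local as a tuple, and we fill
                    // that tuple field by field from the flat caller arguments.)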
                    let mut locals_iter = body.args_iter();
                    while let Some(local) = locals_iter.next() {
                        let dest = self.eval_place(
                            &mir::Place::from(local)
                        )?;
                        if Some(local) == body.spread_arg {
                            // Must be a tuple.
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(dest, i as u64)?;
                                self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                            }
                        } else {
                            // Normal argument.
                            self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                        }
                    }
                    // Now we should have no more caller args.
                    if caller_iter.next().is_some() {
                        trace!("Caller has passed too many args");
                        throw_unsup!(FunctionArgCountMismatch)
                    }
                    // Don't forget to check the return type!
                    if let Some(caller_ret) = dest {
                        let callee_ret = self.eval_place(
                            &mir::Place::return_place()
                        )?;
                        if !Self::check_argument_compat(
                            rust_abi,
                            caller_ret.layout,
                            callee_ret.layout,
                        ) {
                            throw_unsup!(
                                FunctionRetMismatch(caller_ret.layout.ty, callee_ret.layout.ty)
                            )
                        }
                    } else {
                        // The caller provided no return place, so the callee's
                        // return type must be uninhabited (the call never returns).
                        let local = mir::RETURN_PLACE;
                        let callee_layout = self.layout_of_local(self.frame(), local, None)?;
                        if !callee_layout.abi.is_uninhabited() {
                            throw_unsup!(FunctionRetMismatch(
                                self.tcx.types.never, callee_layout.ty
                            ))
                        }
                    }
                    Ok(())
                })();
                match res {
                    Err(err) => {
                        self.stack.pop();
                        Err(err)
                    }
                    Ok(v) => Ok(v)
                }
            }
            // Cannot use a shim here, because that would only result in infinite recursion.
            ty::InstanceDef::Virtual(_, idx) => {
                let mut args = args.to_vec();
                let ptr_size = self.pointer_size();
                // We have to implement all "object safe receivers". Currently we
                // support built-in pointers (&, &mut, Box) as well as unsized-self. We do
                // not yet support custom self types.
                // Also see librustc_codegen_llvm/abi.rs and librustc_codegen_llvm/mir/block.rs.
                let receiver_place = match args[0].layout.ty.builtin_deref(true) {
                    Some(_) => {
                        // Built-in pointer.
                        self.deref_operand(args[0])?
                    }
                    None => {
                        // Unsized self.
                        args[0].assert_mem_place()
                    }
                };
                // Find and consult the vtable.
                let vtable = receiver_place.vtable();
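                // In this vtable layout, slot 0 holds `drop_in_place` and
                // slots 1 and 2 hold size and align, so method pointers start
                // at slot 3; hence the `idx + 3` below.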
                let vtable_slot = vtable.ptr_offset(ptr_size * (idx as u64 + 3), self)?;
                let vtable_slot = self.memory.check_ptr_access(
                    vtable_slot,
                    ptr_size,
                    self.tcx.data_layout.pointer_align.abi,
                )?.expect("cannot be a ZST");
                let fn_ptr = self.memory.get(vtable_slot.alloc_id)?
                    .read_ptr_sized(self, vtable_slot)?.not_undef()?;
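                // Note: despite the name, `drop_fn` below is simply whatever
                // virtual method we just loaded from the vtable; it is only
                // actually a drop function for `drop_in_place` calls.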
                let drop_fn = self.memory.get_fn(fn_ptr)?;

                // `*mut receiver_place.layout.ty` is almost the layout that we
                // want for args[0]: we have to project to field 0 because we want
                // a thin pointer.
                assert!(receiver_place.layout.is_unsized());
                let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
                let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0)?;
                // Adjust the receiver argument.
                args[0] = OpTy::from(ImmTy {
                    layout: this_receiver_ptr,
                    imm: receiver_place.ptr.into()
                });
                trace!("Patched self operand to {:#?}", args[0]);
                // Recurse with the concrete function.
                self.eval_fn_call(drop_fn, span, caller_abi, &args, dest, ret)
            }
        }
    }

    fn drop_in_place(
        &mut self,
        place: PlaceTy<'tcx, M::PointerTag>,
        instance: ty::Instance<'tcx>,
        span: Span,
        target: mir::BasicBlock,
    ) -> InterpResult<'tcx> {
        trace!("drop_in_place: {:?},\n {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object. This may well be unaligned, which is fine
        // for us here. However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.kind {
            ty::Dynamic(..) => {
                // Dropping a trait object.
                self.unpack_dyn_trait(place)?
            }
            _ => (instance, place),
        };

        let arg = ImmTy {
            imm: place.to_ref(),
            layout: self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        };

        let ty = self.tcx.mk_unit(); // return type is ()
        let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);
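        // A dangling return place is fine here: the return type is `()`, a
        // ZST, so nothing is ever actually written to it.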

        self.eval_fn_call(
            FnVal::Instance(instance),
            span,
            Abi::Rust,
            &[arg.into()],
            Some(dest.into()),
            Some(target),
        )
    }
}