// src/librustc_mir/interpret/terminator/mod.rs (upstream rustc 1.25.0)
use rustc::mir;
use rustc::ty::{self, Ty};
use rustc::ty::layout::LayoutOf;
use syntax::codemap::Span;
use syntax::abi::Abi;

use rustc::mir::interpret::{EvalResult, PrimVal, Value};
use super::{EvalContext, eval_context,
            Place, Machine, ValTy};

use rustc_data_structures::indexed_vec::Idx;
use interpret::memory::HasMemory;

mod drop;

impl<'a, 'tcx, M: Machine<'tcx>> EvalContext<'a, 'tcx, M> {
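    /// Transfer control to `target`: point the current frame at that block and reset its
    /// statement counter to the block's first statement.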
    pub fn goto_block(&mut self, target: mir::BasicBlock) {
        self.frame_mut().block = target;
        self.frame_mut().stmt = 0;
    }

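    /// Evaluate a single MIR terminator: adjust control flow within the current frame
    /// (`Goto`, `SwitchInt`, a passing `Assert`), pop the frame on `Return`, or hand calls
    /// and drops off to `eval_fn_call` and `drop_place`.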
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> EvalResult<'tcx> {
        use rustc::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.dump_local(self.frame().return_place);
                self.pop_stack_frame()?
            }

            Goto { target } => self.goto_block(target),

            SwitchInt {
                ref discr,
                ref values,
                ref targets,
                ..
            } => {
                // FIXME(CTFE): forbid branching
                let discr_val = self.eval_operand(discr)?;
                let discr_prim = self.value_to_primval(discr_val)?;

                // Branch to the `otherwise` case by default, if no match is found.
                let mut target_block = targets[targets.len() - 1];

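                // `values[i]` pairs with `targets[i]`; the extra final entry in `targets` is
                // the `otherwise` block chosen above.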
                for (index, const_int) in values.iter().enumerate() {
                    let prim = PrimVal::Bytes(const_int.to_u128_unchecked());
                    if discr_prim.to_bytes()? == prim.to_bytes()? {
                        target_block = targets[index];
                        break;
                    }
                }

                self.goto_block(target_block);
            }

            Call {
                ref func,
                ref args,
                ref destination,
                ..
            } => {
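                // `destination` is `None` for calls to diverging functions.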
                let destination = match *destination {
                    Some((ref lv, target)) => Some((self.eval_place(lv)?, target)),
                    None => None,
                };

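                // Resolve the callee to a concrete instance plus its signature: either through
                // a function pointer read from memory (and checked against the expected
                // signature) or directly from a `TyFnDef`.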
                let func = self.eval_operand(func)?;
                let (fn_def, sig) = match func.ty.sty {
                    ty::TyFnPtr(sig) => {
                        let fn_ptr = self.value_to_primval(func)?.to_ptr()?;
                        let instance = self.memory.get_fn(fn_ptr)?;
                        let instance_ty = instance.ty(self.tcx);
                        match instance_ty.sty {
                            ty::TyFnDef(..) => {
                                let real_sig = instance_ty.fn_sig(self.tcx);
                                let sig = self.tcx.erase_late_bound_regions_and_normalize(&sig);
                                let real_sig = self.tcx.erase_late_bound_regions_and_normalize(&real_sig);
                                if !self.check_sig_compat(sig, real_sig)? {
                                    return err!(FunctionPointerTyMismatch(real_sig, sig));
                                }
                            }
                            ref other => bug!("instance def ty: {:?}", other),
                        }
                        (instance, sig)
                    }
                    ty::TyFnDef(def_id, substs) => (
                        self.resolve(def_id, substs)?,
                        func.ty.fn_sig(self.tcx),
                    ),
                    _ => {
                        let msg = format!("can't handle callee of type {:?}", func.ty);
                        return err!(Unimplemented(msg));
                    }
                };
                let args = self.operands_to_args(args)?;
                let sig = self.tcx.erase_late_bound_regions_and_normalize(&sig);
                self.eval_fn_call(
                    fn_def,
                    destination,
                    &args,
                    terminator.source_info.span,
                    sig,
                )?;
            }

            Drop {
                ref location,
                target,
                ..
            } => {
                // FIXME(CTFE): forbid drop in const eval
                let place = self.eval_place(location)?;
                let ty = self.place_ty(location);
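                // Apply the frame's substitutions so we drop the fully monomorphized type.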
                let ty = self.tcx.trans_apply_param_substs(self.substs(), &ty);
                trace!("TerminatorKind::drop: {:?}, type {}", location, ty);

                let instance = eval_context::resolve_drop_in_place(self.tcx, ty);
                self.drop_place(
                    place,
                    instance,
                    ty,
                    terminator.source_info.span,
                    target,
                )?;
            }

            Assert {
                ref cond,
                expected,
                ref msg,
                target,
                ..
            } => {
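                // A failed assertion becomes the corresponding interpreter error rather than
                // unwinding.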
                let cond_val = self.eval_operand_to_primval(cond)?.to_bool()?;
                if expected == cond_val {
                    self.goto_block(target);
                } else {
                    use rustc::mir::AssertMessage::*;
                    return match *msg {
                        BoundsCheck { ref len, ref index } => {
                            let span = terminator.source_info.span;
                            let len = self.eval_operand_to_primval(len)
                                .expect("can't eval len")
                                .to_u64()?;
                            let index = self.eval_operand_to_primval(index)
                                .expect("can't eval index")
                                .to_u64()?;
                            err!(ArrayIndexOutOfBounds(span, len, index))
                        }
                        Math(ref err) => {
                            err!(Math(terminator.source_info.span, err.clone()))
                        }
                        GeneratorResumedAfterReturn |
                        GeneratorResumedAfterPanic => unimplemented!(),
                    };
                }
            }

            Yield { .. } => unimplemented!("{:#?}", terminator.kind),
            GeneratorDrop => unimplemented!(),
            DropAndReplace { .. } => unimplemented!(),
            Resume => unimplemented!(),
            Abort => unimplemented!(),
            FalseEdges { .. } => bug!("should have been eliminated by `simplify_branches` mir pass"),
            FalseUnwind { .. } => bug!("should have been eliminated by `simplify_branches` mir pass"),
            Unreachable => return err!(Unreachable),
        }

        Ok(())
    }

    /// Decides whether it is okay to call the method with signature `real_sig` using signature `sig`.
    /// FIXME: This should take into account the platform-dependent ABI description.
    fn check_sig_compat(
        &mut self,
        sig: ty::FnSig<'tcx>,
        real_sig: ty::FnSig<'tcx>,
    ) -> EvalResult<'tcx, bool> {
        fn check_ty_compat<'tcx>(ty: Ty<'tcx>, real_ty: Ty<'tcx>) -> bool {
            if ty == real_ty {
                return true;
            } // This is actually a fast pointer comparison
            return match (&ty.sty, &real_ty.sty) {
                // Permit changing the pointer type of raw pointers and references as well as
                // mutability of raw pointers.
                // TODO: Should not be allowed when fat pointers are involved.
                (&ty::TyRawPtr(_), &ty::TyRawPtr(_)) => true,
                (&ty::TyRef(_, _), &ty::TyRef(_, _)) => {
                    ty.is_mutable_pointer() == real_ty.is_mutable_pointer()
                }
                // rule out everything else
                _ => false,
            };
        }

        if sig.abi == real_sig.abi && sig.variadic == real_sig.variadic &&
            sig.inputs_and_output.len() == real_sig.inputs_and_output.len() &&
            sig.inputs_and_output
                .iter()
                .zip(real_sig.inputs_and_output)
                .all(|(ty, real_ty)| check_ty_compat(ty, real_ty))
        {
            // Definitely good.
            return Ok(true);
        }

        if sig.variadic || real_sig.variadic {
            // We're not touching this
            return Ok(false);
        }

        // We need to allow what comes up when a non-capturing closure is cast to a fn().
        match (sig.abi, real_sig.abi) {
            (Abi::Rust, Abi::RustCall) // check the ABIs. This makes the test here non-symmetric.
                if check_ty_compat(sig.output(), real_sig.output()) && real_sig.inputs_and_output.len() == 3 => {
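                // `inputs_and_output` here is (closure environment, packed argument tuple,
                // return type).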
                // First argument of real_sig must be a ZST
                let fst_ty = real_sig.inputs_and_output[0];
                if self.layout_of(fst_ty)?.is_zst() {
                    // Second argument must be a tuple matching the argument list of sig
                    let snd_ty = real_sig.inputs_and_output[1];
                    match snd_ty.sty {
                        ty::TyTuple(tys, _) if sig.inputs().len() == tys.len() =>
                            if sig.inputs().iter().zip(tys).all(|(ty, real_ty)| check_ty_compat(ty, real_ty)) {
                                return Ok(true)
                            },
                        _ => {}
                    }
                }
            }
            _ => {}
        };

        // Nope, this doesn't work.
        return Ok(false);
    }

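    /// Dispatch a resolved call: intrinsics are handed to the `Machine`, virtual calls are
    /// resolved through the vtable and retried with the concrete instance, and everything
    /// else gets its stack frame pushed via `M::eval_fn_call` and its arguments written into
    /// the callee's argument locals.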
    fn eval_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        destination: Option<(Place, mir::BasicBlock)>,
        args: &[ValTy<'tcx>],
        span: Span,
        sig: ty::FnSig<'tcx>,
    ) -> EvalResult<'tcx> {
        trace!("eval_fn_call: {:#?}", instance);
        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
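                // Intrinsics need a return destination; a diverging intrinsic call is treated
                // as unreachable.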
                let (ret, target) = match destination {
                    Some(dest) => dest,
                    _ => return err!(Unreachable),
                };
                let ty = sig.output();
                let layout = self.layout_of(ty)?;
                M::call_intrinsic(self, instance, args, ret, layout, target)?;
                self.dump_local(ret);
                Ok(())
            }
            // FIXME: figure out why we can't just go through the shim
            ty::InstanceDef::ClosureOnceShim { .. } => {
                if M::eval_fn_call(self, instance, destination, args, span, sig)? {
                    return Ok(());
                }
                let mut arg_locals = self.frame().mir.args_iter();
                match sig.abi {
                    // closure called as a closure (FnOnce)
                    Abi::RustCall => {
                        for (arg_local, &valty) in arg_locals.zip(args) {
                            let dest = self.eval_place(&mir::Place::Local(arg_local))?;
                            self.write_value(valty, dest)?;
                        }
                    }
                    // non-capturing closure called as a fn ptr:
                    // need to inject zst ptr for closure object (aka do nothing)
                    // and need to pack arguments
                    Abi::Rust => {
                        trace!(
                            "arg_locals: {:?}",
                            self.frame().mir.args_iter().collect::<Vec<_>>()
                        );
                        trace!("args: {:?}", args);
                        let local = arg_locals.nth(1).unwrap();
                        for (i, &valty) in args.into_iter().enumerate() {
                            let dest = self.eval_place(&mir::Place::Local(local).field(
                                mir::Field::new(i),
                                valty.ty,
                            ))?;
                            self.write_value(valty, dest)?;
                        }
                    }
                    _ => bug!("bad ABI for ClosureOnceShim: {:?}", sig.abi),
                }
                Ok(())
            }
            ty::InstanceDef::FnPtrShim(..) |
            ty::InstanceDef::DropGlue(..) |
            ty::InstanceDef::CloneShim(..) |
            ty::InstanceDef::Item(_) => {
                // Push the stack frame, and potentially be entirely done if the call got hooked
                if M::eval_fn_call(self, instance, destination, args, span, sig)? {
                    return Ok(());
                }

                // Pass the arguments
                let mut arg_locals = self.frame().mir.args_iter();
                trace!("ABI: {:?}", sig.abi);
                trace!(
                    "arg_locals: {:?}",
                    self.frame().mir.args_iter().collect::<Vec<_>>()
                );
                trace!("args: {:?}", args);
                match sig.abi {
                    Abi::RustCall => {
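                        // With the rust-call ABI the caller passes exactly two operands: the
                        // callee's self/environment value and a tuple of the remaining
                        // arguments.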
                        assert_eq!(args.len(), 2);

                        {
                            // write first argument
                            let first_local = arg_locals.next().unwrap();
                            let dest = self.eval_place(&mir::Place::Local(first_local))?;
                            self.write_value(args[0], dest)?;
                        }

                        // unpack and write all other args
                        let layout = self.layout_of(args[1].ty)?;
                        if let ty::TyTuple(..) = args[1].ty.sty {
                            if self.frame().mir.args_iter().count() == layout.fields.count() + 1 {
                                match args[1].value {
                                    Value::ByRef(ptr, align) => {
                                        for (i, arg_local) in arg_locals.enumerate() {
                                            let field = layout.field(&self, i)?;
                                            let offset = layout.fields.offset(i).bytes();
                                            let arg = Value::ByRef(ptr.offset(offset, &self)?,
                                                                   align.min(field.align));
                                            let dest =
                                                self.eval_place(&mir::Place::Local(arg_local))?;
                                            trace!(
                                                "writing arg {:?} to {:?} (type: {})",
                                                arg,
                                                dest,
                                                field.ty
                                            );
                                            let valty = ValTy {
                                                value: arg,
                                                ty: field.ty,
                                            };
                                            self.write_value(valty, dest)?;
                                        }
                                    }
                                    Value::ByVal(PrimVal::Undef) => {}
                                    other => {
                                        trace!("{:#?}, {:#?}", other, layout);
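                                        // Peel off wrapper layouts (a single field at offset 0
                                        // covering the whole size) until the layout matches the
                                        // value actually being passed.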
                                        let mut layout = layout;
                                        'outer: loop {
                                            for i in 0..layout.fields.count() {
                                                let field = layout.field(&self, i)?;
                                                if layout.fields.offset(i).bytes() == 0 && layout.size == field.size {
                                                    layout = field;
                                                    continue 'outer;
                                                }
                                            }
                                            break;
                                        }
                                        let dest = self.eval_place(&mir::Place::Local(
                                            arg_locals.next().unwrap(),
                                        ))?;
                                        let valty = ValTy {
                                            value: other,
                                            ty: layout.ty,
                                        };
                                        self.write_value(valty, dest)?;
                                    }
                                }
                            } else {
                                trace!("manual impl of rust-call ABI");
                                // called a manual impl of a rust-call function
                                let dest = self.eval_place(
                                    &mir::Place::Local(arg_locals.next().unwrap()),
                                )?;
                                self.write_value(args[1], dest)?;
                            }
                        } else {
                            bug!(
                                "rust-call ABI tuple argument was {:#?}, {:#?}",
                                args[1].ty,
                                layout
                            );
                        }
                    }
                    _ => {
                        for (arg_local, &valty) in arg_locals.zip(args) {
                            let dest = self.eval_place(&mir::Place::Local(arg_local))?;
                            self.write_value(valty, dest)?;
                        }
                    }
                }
                Ok(())
            }
            // cannot use the shim here, because that will only result in infinite recursion
            ty::InstanceDef::Virtual(_, idx) => {
                let ptr_size = self.memory.pointer_size();
                let ptr_align = self.tcx.data_layout.pointer_align;
                let (ptr, vtable) = self.into_ptr_vtable_pair(args[0].value)?;
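                // Vtable slots 0..3 hold the drop glue, size, and align, so method `idx` lives
                // at byte offset `(idx + 3) * ptr_size`.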
                let fn_ptr = self.memory.read_ptr_sized_unsigned(
                    vtable.offset(ptr_size * (idx as u64 + 3), &self)?,
                    ptr_align
                )?.to_ptr()?;
                let instance = self.memory.get_fn(fn_ptr)?;
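                // Replace the fat-pointer receiver with a thin pointer to the concrete type
                // before recursing on the resolved instance.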
                let mut args = args.to_vec();
                let ty = self.layout_of(args[0].ty)?.field(&self, 0)?.ty;
                args[0].ty = ty;
                args[0].value = ptr.to_value();
                // recurse with concrete function
                self.eval_fn_call(instance, destination, &args, span, sig)
            }
        }
    }
}