use std::borrow::Cow;
use std::convert::TryFrom;

use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::ty::layout::{self, TyAndLayout};
use rustc_middle::ty::Instance;
use rustc_middle::{
    mir,
    ty::{self, Ty},
};
use rustc_target::abi::{self, LayoutOf as _};
use rustc_target::spec::abi::Abi;

use super::{
    FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, StackPopCleanup,
    StackPopUnwind,
};

impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
17df50a5 XL |
20 | fn fn_can_unwind(&self, attrs: CodegenFnAttrFlags, abi: Abi) -> bool { |
21 | layout::fn_can_unwind( | |
22 | self.tcx.sess.panic_strategy(), | |
23 | attrs, | |
24 | layout::conv_from_spec_abi(*self.tcx, abi), | |
25 | abi, | |
26 | ) | |
27 | } | |
    /// Evaluate a single MIR terminator, advancing the interpreter state:
    /// jumping to the next block, pushing/popping stack frames for calls and
    /// returns, or raising the appropriate interpreter error.
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.pop_stack_frame(/* unwinding */ false)?
            }

            Goto { target } => self.go_to_block(target),

            SwitchInt { ref discr, ref targets, switch_ty } => {
                // Evaluate the discriminant to an immediate value.
                let discr = self.read_immediate(&self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);
                assert_eq!(discr.layout.ty, switch_ty);

                // Branch to the `otherwise` case by default, if no match is found.
                assert!(!targets.iter().is_empty());
                let mut target_block = targets.otherwise();

                for (const_int, target) in targets.iter() {
                    // Compare using binary_op, to also support pointer values
                    let res = self
                        .overflowing_binary_op(
                            mir::BinOp::Eq,
                            &discr,
                            &ImmTy::from_uint(const_int, discr.layout),
                        )?
                        .0;
                    if res.to_bool()? {
                        target_block = target;
                        break;
                    }
                }

                self.go_to_block(target_block);
            }

            Call { ref func, ref args, destination, ref cleanup, from_hir_call: _, fn_span: _ } => {
                // Remember where we are; checked below to make sure the call
                // actually made progress (pushed a frame or jumped).
                let old_stack = self.frame_idx();
                let old_loc = self.frame().loc;
                let func = self.eval_operand(func, None)?;
                // Determine what is being called, the caller-side ABI, and
                // whether the caller is allowed to unwind out of this call.
                let (fn_val, abi, caller_can_unwind) = match *func.layout.ty.kind() {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_scalar(&func)?.check_init()?;
                        let fn_val = self.memory.get_fn(fn_ptr)?;
                        (
                            fn_val,
                            caller_abi,
                            self.fn_can_unwind(layout::fn_ptr_codegen_fn_attr_flags(), caller_abi),
                        )
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (
                            FnVal::Instance(
                                self.resolve(ty::WithOptConstParam::unknown(def_id), substs)?,
                            ),
                            sig.abi(),
                            self.fn_can_unwind(self.tcx.codegen_fn_attrs(def_id).flags, sig.abi()),
                        )
                    }
                    _ => span_bug!(
                        terminator.source_info.span,
                        "invalid callee of type {:?}",
                        func.layout.ty
                    ),
                };
                let args = self.eval_operands(args)?;
                // Declared before the `match` so the place borrowed in `ret`
                // lives long enough for the call below.
                let dest_place;
                let ret = match destination {
                    Some((dest, ret)) => {
                        dest_place = self.eval_place(dest)?;
                        Some((&dest_place, ret))
                    }
                    None => None,
                };
                self.eval_fn_call(
                    fn_val,
                    abi,
                    &args[..],
                    ret,
                    // Translate `cleanup` + caller unwindability into the
                    // stack-pop unwind behavior for the callee frame.
                    match (cleanup, caller_can_unwind) {
                        (Some(cleanup), true) => StackPopUnwind::Cleanup(*cleanup),
                        (None, true) => StackPopUnwind::Skip,
                        (_, false) => StackPopUnwind::NotAllowed,
                    },
                )?;
                // Sanity-check that `eval_fn_call` either pushed a new frame or
                // did a jump to another block.
                if self.frame_idx() == old_stack && self.frame().loc == old_loc {
                    span_bug!(terminator.source_info.span, "evaluating this call made no progress");
                }
            }

            Drop { place, target, unwind } => {
                let place = self.eval_place(place)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", place, ty);

                // Resolve the drop glue for this type and run it.
                let instance = Instance::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(&place, instance, target, unwind)?;
            }

            Assert { ref cond, expected, ref msg, target, cleanup } => {
                let cond_val =
                    self.read_immediate(&self.eval_operand(cond, None)?)?.to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.go_to_block(target);
                } else {
                    // Let the machine decide how the failed assertion is reported.
                    M::assert_panic(self, msg, cleanup)?;
                }
            }

            Abort => {
                M::abort(self, "the program aborted execution".to_owned())?;
            }

            // When we encounter Resume, we've finished unwinding
            // cleanup for the current stack frame. We pop it in order
            // to continue unwinding the next frame
            Resume => {
                trace!("unwinding: resuming from cleanup");
                // By definition, a Resume terminator means
                // that we're unwinding
                self.pop_stack_frame(/* unwinding */ true)?;
                return Ok(());
            }

            // It is UB to ever encounter this.
            Unreachable => throw_ub!(Unreachable),

            // These should never occur for MIR we actually run.
            DropAndReplace { .. }
            | FalseEdge { .. }
            | FalseUnwind { .. }
            | Yield { .. }
            | GeneratorDrop => span_bug!(
                terminator.source_info.span,
                "{:#?} should have been eliminated by MIR pass",
                terminator.kind
            ),

            // Inline assembly can't be interpreted.
            InlineAsm { .. } => throw_unsup_format!("inline assembly is not supported"),
        }

        Ok(())
    }
181 | fn check_argument_compat( | |
a1dfa0c6 | 182 | rust_abi: bool, |
ba9703b0 XL |
183 | caller: TyAndLayout<'tcx>, |
184 | callee: TyAndLayout<'tcx>, | |
b7449926 XL |
185 | ) -> bool { |
186 | if caller.ty == callee.ty { | |
187 | // No question | |
188 | return true; | |
189 | } | |
a1dfa0c6 XL |
190 | if !rust_abi { |
191 | // Don't risk anything | |
192 | return false; | |
193 | } | |
b7449926 XL |
194 | // Compare layout |
195 | match (&caller.abi, &callee.abi) { | |
a1dfa0c6 XL |
196 | // Different valid ranges are okay (once we enforce validity, |
197 | // that will take care to make it UB to leave the range, just | |
198 | // like for transmute). | |
ba9703b0 | 199 | (abi::Abi::Scalar(ref caller), abi::Abi::Scalar(ref callee)) => { |
dfeec247 XL |
200 | caller.value == callee.value |
201 | } | |
202 | ( | |
ba9703b0 XL |
203 | abi::Abi::ScalarPair(ref caller1, ref caller2), |
204 | abi::Abi::ScalarPair(ref callee1, ref callee2), | |
dfeec247 | 205 | ) => caller1.value == callee1.value && caller2.value == callee2.value, |
b7449926 | 206 | // Be conservative |
dfeec247 | 207 | _ => false, |
b7449926 XL |
208 | } |
209 | } | |
211 | /// Pass a single argument, checking the types for compatibility. | |
212 | fn pass_argument( | |
213 | &mut self, | |
a1dfa0c6 | 214 | rust_abi: bool, |
dfeec247 | 215 | caller_arg: &mut impl Iterator<Item = OpTy<'tcx, M::PointerTag>>, |
6a06907d | 216 | callee_arg: &PlaceTy<'tcx, M::PointerTag>, |
dc9dc135 | 217 | ) -> InterpResult<'tcx> { |
a1dfa0c6 | 218 | if rust_abi && callee_arg.layout.is_zst() { |
b7449926 XL |
219 | // Nothing to do. |
220 | trace!("Skipping callee ZST"); | |
221 | return Ok(()); | |
222 | } | |
ba9703b0 XL |
223 | let caller_arg = caller_arg.next().ok_or_else(|| { |
224 | err_ub_format!("calling a function with fewer arguments than it requires") | |
225 | })?; | |
a1dfa0c6 | 226 | if rust_abi { |
74b04a01 | 227 | assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out"); |
b7449926 XL |
228 | } |
229 | // Now, check | |
a1dfa0c6 | 230 | if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) { |
ba9703b0 XL |
231 | throw_ub_format!( |
232 | "calling a function with argument of type {:?} passing data of type {:?}", | |
233 | callee_arg.layout.ty, | |
234 | caller_arg.layout.ty | |
235 | ) | |
b7449926 | 236 | } |
0bf4aa26 | 237 | // We allow some transmutes here |
6a06907d | 238 | self.copy_op_transmute(&caller_arg, callee_arg) |
b7449926 XL |
239 | } |
    /// Call this function -- pushing the stack frame and initializing the arguments.
    fn eval_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        ret: Option<(&PlaceTy<'tcx, M::PointerTag>, mir::BasicBlock)>,
        mut unwind: StackPopUnwind,
    ) -> InterpResult<'tcx> {
        trace!("eval_fn_call: {:#?}", fn_val);

        // Machine-provided "extra" functions are handled entirely by the machine.
        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                return M::call_extra_fn(self, extra, caller_abi, args, ret, unwind);
            }
        };

        // Compute the ABI from a callee's type: `FnDef` uses its signature,
        // closures use the "rust-call" ABI, generators plain Rust.
        let get_abi = |this: &Self, instance_ty: Ty<'tcx>| match instance_ty.kind() {
            ty::FnDef(..) => instance_ty.fn_sig(*this.tcx).abi(),
            ty::Closure(..) => Abi::RustCall,
            ty::Generator(..) => Abi::Rust,
            _ => span_bug!(this.cur_span(), "unexpected callee ty: {:?}", instance_ty),
        };

        // ABI check: caller and callee ABI must agree up to normalization.
        let check_abi = |callee_abi: Abi| -> InterpResult<'tcx> {
            let normalize_abi = |abi| match abi {
                Abi::Rust | Abi::RustCall | Abi::RustIntrinsic | Abi::PlatformIntrinsic =>
                // These are all the same ABI, really.
                {
                    Abi::Rust
                }
                abi => abi,
            };
            if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                throw_ub_format!(
                    "calling a function with ABI {} using caller ABI {}",
                    callee_abi.name(),
                    caller_abi.name()
                )
            }
            Ok(())
        };

        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                if M::enforce_abi(self) {
                    check_abi(get_abi(self, instance.ty(*self.tcx, self.param_env)))?;
                }
                assert!(caller_abi == Abi::RustIntrinsic || caller_abi == Abi::PlatformIntrinsic);
                M::call_intrinsic(self, instance, args, ret, unwind)
            }
            ty::InstanceDef::VtableShim(..)
            | ty::InstanceDef::ReifyShim(..)
            | ty::InstanceDef::ClosureOnceShim { .. }
            | ty::InstanceDef::FnPtrShim(..)
            | ty::InstanceDef::DropGlue(..)
            | ty::InstanceDef::CloneShim(..)
            | ty::InstanceDef::Item(_) => {
                // We need MIR for this fn. `None` means the machine handled
                // the call itself, so there is nothing left for us to do.
                let body =
                    match M::find_mir_or_eval_fn(self, instance, caller_abi, args, ret, unwind)? {
                        Some(body) => body,
                        None => return Ok(()),
                    };

                // Check against the ABI of the MIR body we are calling (not the ABI of `instance`;
                // these can differ when `find_mir_or_eval_fn` does something clever like resolve
                // exported symbol names).
                let callee_def_id = body.source.def_id();
                let callee_abi = get_abi(self, self.tcx.type_of(callee_def_id));

                if M::enforce_abi(self) {
                    check_abi(callee_abi)?;
                }

                // Downgrade `unwind` if the callee is not permitted to unwind.
                if !matches!(unwind, StackPopUnwind::NotAllowed)
                    && !self
                        .fn_can_unwind(self.tcx.codegen_fn_attrs(callee_def_id).flags, callee_abi)
                {
                    // The callee cannot unwind.
                    unwind = StackPopUnwind::NotAllowed;
                }

                self.push_stack_frame(
                    instance,
                    body,
                    ret.map(|p| p.0),
                    StackPopCleanup::Goto { ret: ret.map(|p| p.1), unwind },
                )?;

                // If an error is raised here, pop the frame again to get an accurate backtrace.
                // To this end, we wrap it all in a `try` block.
                let res: InterpResult<'tcx> = try {
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>()
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        body.spread_arg,
                        body.args_iter()
                            .map(|local| (
                                local,
                                self.layout_of_local(self.frame(), local, None).unwrap().ty
                            ))
                            .collect::<Vec<_>>()
                    );

                    // Figure out how to pass which arguments.
                    // The Rust ABI is special: ZST get skipped.
                    let rust_abi = match caller_abi {
                        Abi::Rust | Abi::RustCall => true,
                        _ => false,
                    };
                    // We have two iterators: Where the arguments come from,
                    // and where they go to.

                    // For where they come from: If the ABI is RustCall, we untuple the
                    // last incoming argument. These two iterators do not have the same type,
                    // so to keep the code paths uniform we accept an allocation
                    // (for RustCall ABI only).
                    let caller_args: Cow<'_, [OpTy<'tcx, M::PointerTag>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple
                            let (untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(
                                args.iter()
                                    .map(|&a| Ok(a))
                                    .chain(
                                        (0..untuple_arg.layout.fields.count())
                                            .map(|i| self.operand_field(untuple_arg, i)),
                                    )
                                    .collect::<InterpResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>(
                                    )?,
                            )
                        } else {
                            // Plain arg passing
                            Cow::from(args)
                        };
                    // Skip ZSTs
                    let mut caller_iter =
                        caller_args.iter().filter(|op| !rust_abi || !op.layout.is_zst()).copied();

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`. If we could write
                    // this is a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_iter` for ZSTs.
                    for local in body.args_iter() {
                        let dest = self.eval_place(mir::Place::from(local))?;
                        if Some(local) == body.spread_arg {
                            // Must be a tuple
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(&dest, i)?;
                                self.pass_argument(rust_abi, &mut caller_iter, &dest)?;
                            }
                        } else {
                            // Normal argument
                            self.pass_argument(rust_abi, &mut caller_iter, &dest)?;
                        }
                    }
                    // Now we should have no more caller args
                    if caller_iter.next().is_some() {
                        throw_ub_format!("calling a function with more arguments than it expected")
                    }
                    // Don't forget to check the return type!
                    if let Some((caller_ret, _)) = ret {
                        let callee_ret = self.eval_place(mir::Place::return_place())?;
                        if !Self::check_argument_compat(
                            rust_abi,
                            caller_ret.layout,
                            callee_ret.layout,
                        ) {
                            throw_ub_format!(
                                "calling a function with return type {:?} passing \
                                 return place of type {:?}",
                                callee_ret.layout.ty,
                                caller_ret.layout.ty
                            )
                        }
                    } else {
                        // No return place: only fine if the function cannot return.
                        let local = mir::RETURN_PLACE;
                        let callee_layout = self.layout_of_local(self.frame(), local, None)?;
                        if !callee_layout.abi.is_uninhabited() {
                            throw_ub_format!("calling a returning function without a return place")
                        }
                    }
                };
                match res {
                    Err(err) => {
                        // Drop the frame we just pushed so the backtrace points
                        // at the caller, not the half-initialized callee.
                        self.stack_mut().pop();
                        Err(err)
                    }
                    Ok(()) => Ok(()),
                }
            }
            // cannot use the shim here, because that will only result in infinite recursion
            ty::InstanceDef::Virtual(_, idx) => {
                let mut args = args.to_vec();
                // We have to implement all "object safe receivers". Currently we
                // support built-in pointers `(&, &mut, Box)` as well as unsized-self. We do
                // not yet support custom self types.
                // Also see `compiler/rustc_codegen_llvm/src/abi.rs` and `compiler/rustc_codegen_ssa/src/mir/block.rs`.
                let receiver_place = match args[0].layout.ty.builtin_deref(true) {
                    Some(_) => {
                        // Built-in pointer.
                        self.deref_operand(&args[0])?
                    }
                    None => {
                        // Unsized self.
                        args[0].assert_mem_place(self)
                    }
                };
                // Find and consult vtable
                let vtable = receiver_place.vtable();
                let drop_fn = self.get_vtable_slot(vtable, u64::try_from(idx).unwrap())?;

                // `*mut receiver_place.layout.ty` is almost the layout that we
                // want for args[0]: We have to project to field 0 because we want
                // a thin pointer.
                assert!(receiver_place.layout.is_unsized());
                let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
                let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0)?;
                // Adjust receiver argument.
                args[0] =
                    OpTy::from(ImmTy::from_immediate(receiver_place.ptr.into(), this_receiver_ptr));
                trace!("Patched self operand to {:#?}", args[0]);
                // recurse with concrete function
                self.eval_fn_call(drop_fn, caller_abi, &args, ret, unwind)
            }
        }
    }
480 | fn drop_in_place( | |
481 | &mut self, | |
6a06907d | 482 | place: &PlaceTy<'tcx, M::PointerTag>, |
b7449926 | 483 | instance: ty::Instance<'tcx>, |
b7449926 | 484 | target: mir::BasicBlock, |
dfeec247 | 485 | unwind: Option<mir::BasicBlock>, |
dc9dc135 | 486 | ) -> InterpResult<'tcx> { |
b7449926 XL |
487 | trace!("drop_in_place: {:?},\n {:?}, {:?}", *place, place.layout.ty, instance); |
488 | // We take the address of the object. This may well be unaligned, which is fine | |
489 | // for us here. However, unaligned accesses will probably make the actual drop | |
490 | // implementation fail -- a problem shared by rustc. | |
491 | let place = self.force_allocation(place)?; | |
492 | ||
1b1a35ee | 493 | let (instance, place) = match place.layout.ty.kind() { |
b7449926 XL |
494 | ty::Dynamic(..) => { |
495 | // Dropping a trait object. | |
6a06907d | 496 | self.unpack_dyn_trait(&place)? |
b7449926 XL |
497 | } |
498 | _ => (instance, place), | |
499 | }; | |
500 | ||
ba9703b0 XL |
501 | let arg = ImmTy::from_immediate( |
502 | place.to_ref(), | |
503 | self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?, | |
504 | ); | |
b7449926 XL |
505 | |
506 | let ty = self.tcx.mk_unit(); // return type is () | |
a1dfa0c6 | 507 | let dest = MPlaceTy::dangling(self.layout_of(ty)?, self); |
b7449926 XL |
508 | |
509 | self.eval_fn_call( | |
416331ca | 510 | FnVal::Instance(instance), |
b7449926 | 511 | Abi::Rust, |
9fa01778 | 512 | &[arg.into()], |
6a06907d | 513 | Some((&dest.into(), target)), |
17df50a5 XL |
514 | match unwind { |
515 | Some(cleanup) => StackPopUnwind::Cleanup(cleanup), | |
516 | None => StackPopUnwind::Skip, | |
517 | }, | |
b7449926 XL |
518 | ) |
519 | } | |
520 | } |