// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Code relating to drop glue.

use back::abi;
use back::link::*;
use llvm;
use llvm::{ValueRef, get_param};
use metadata::csearch;
use middle::lang_items::ExchangeFreeFnLangItem;
use middle::subst;
use middle::subst::{Subst, Substs};
use middle::ty::{self, Ty};
use trans::adt;
use trans::adt::GetDtorType; // for tcx.dtor_type()
use trans::base::*;
use trans::build::*;
use trans::callee;
use trans::cleanup;
use trans::cleanup::CleanupMethods;
use trans::common::*;
use trans::debuginfo::DebugLoc;
use trans::declare;
use trans::expr;
use trans::foreign;
use trans::inline;
use trans::machine::*;
use trans::monomorphize;
use trans::type_of::{type_of, type_of_dtor, sizing_type_of, align_of};
use trans::type_::Type;
use util::ppaux;
use util::ppaux::{ty_to_short_str, Repr};

use arena::TypedArena;
use libc::c_uint;
use syntax::ast;

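/// Emits a call to the `exchange_free` lang item to release a heap
/// allocation, passing the pointer (cast to `i8*`) along with the
/// dynamically computed size and alignment of the freed value.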
pub fn trans_exchange_free_dyn<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                                           v: ValueRef,
                                           size: ValueRef,
                                           align: ValueRef,
                                           debug_loc: DebugLoc)
                                           -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("trans_exchange_free");
    let ccx = cx.ccx();
    callee::trans_lang_call(cx,
                            langcall(cx, None, "", ExchangeFreeFnLangItem),
                            &[PointerCast(cx, v, Type::i8p(ccx)), size, align],
                            Some(expr::Ignore),
                            debug_loc).bcx
}

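/// Convenience wrapper around `trans_exchange_free_dyn` for the case where
/// the size and alignment of the freed value are known at translation time.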
pub fn trans_exchange_free<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                                       v: ValueRef,
                                       size: u64,
                                       align: u32,
                                       debug_loc: DebugLoc)
                                       -> Block<'blk, 'tcx> {
    trans_exchange_free_dyn(cx,
                            v,
                            C_uint(cx.ccx(), size),
                            C_uint(cx.ccx(), align),
                            debug_loc)
}

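/// Frees the heap allocation backing a `Box` of the given (sized) content
/// type, computing the size and alignment from the type. Boxes of zero-sized
/// types do not allocate, so nothing is emitted for them.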
pub fn trans_exchange_free_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                          ptr: ValueRef,
                                          content_ty: Ty<'tcx>,
                                          debug_loc: DebugLoc)
                                          -> Block<'blk, 'tcx> {
    assert!(type_is_sized(bcx.ccx().tcx(), content_ty));
    let sizing_type = sizing_type_of(bcx.ccx(), content_ty);
    let content_size = llsize_of_alloc(bcx.ccx(), sizing_type);

    // `Box<ZeroSizeType>` does not allocate.
    if content_size != 0 {
        let content_align = align_of(bcx.ccx(), content_ty);
        trans_exchange_free(bcx, ptr, content_size, content_align, debug_loc)
    } else {
        bcx
    }
}

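/// Returns the type to generate (and cache) drop glue for. Types that need no
/// drop glue are collapsed to `i8` so they can all share a single glue
/// function; the same applies to a `Box` whose contents need no drop and
/// whose allocation is zero-sized, since there is nothing to free.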
pub fn get_drop_glue_type<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                    t: Ty<'tcx>) -> Ty<'tcx> {
    let tcx = ccx.tcx();
    // Even if there is no dtor for t, there might be one deeper down and we
    // might need to pass in the vtable ptr.
    if !type_is_sized(tcx, t) {
        return t
    }

    // FIXME (#22815): note that type_needs_drop conservatively
    // approximates in some cases and may say a type expression
    // requires drop glue when it actually does not.
    //
    // (In this case it is not clear whether any harm is done, i.e.
    // erroneously returning `t` in some cases where we could have
    // returned `tcx.types.i8` does not appear unsound. The impact on
    // code quality is unknown at this time.)

    if !type_needs_drop(tcx, t) {
        return tcx.types.i8;
    }
    match t.sty {
        ty::ty_uniq(typ) if !type_needs_drop(tcx, typ)
                         && type_is_sized(tcx, typ) => {
            let llty = sizing_type_of(ccx, typ);
            // `Box<ZeroSizeType>` does not allocate.
            if llsize_of_alloc(ccx, llty) == 0 {
                tcx.types.i8
            } else {
                t
            }
        }
        _ => t
    }
}

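/// Emits a call to the drop glue for `t`, taking `v` by reference. This is
/// the entry point used when the destructor, if any, should run as usual.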
pub fn drop_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                           v: ValueRef,
                           t: Ty<'tcx>,
                           debug_loc: DebugLoc) -> Block<'blk, 'tcx> {
    drop_ty_core(bcx, v, t, debug_loc, false)
}

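/// As `drop_ty`, but `skip_dtor` controls whether the type's own `Drop` impl
/// is skipped so that only its contents are dropped. The value is cast to the
/// canonical drop-glue type before the glue function is called, and no code
/// is emitted at all when `t` needs no drop.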
pub fn drop_ty_core<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                v: ValueRef,
                                t: Ty<'tcx>,
                                debug_loc: DebugLoc,
                                skip_dtor: bool) -> Block<'blk, 'tcx> {
    // NB: v is an *alias* of type t here, not a direct value.
    debug!("drop_ty_core(t={}, skip_dtor={})", t.repr(bcx.tcx()), skip_dtor);
    let _icx = push_ctxt("drop_ty");
    if bcx.fcx.type_needs_drop(t) {
        let ccx = bcx.ccx();
        let g = if skip_dtor {
            DropGlueKind::TyContents(t)
        } else {
            DropGlueKind::Ty(t)
        };
        let glue = get_drop_glue_core(ccx, g);
        let glue_type = get_drop_glue_type(ccx, t);
        let ptr = if glue_type != t {
            PointerCast(bcx, v, type_of(ccx, glue_type).ptr_to())
        } else {
            v
        };

        Call(bcx, glue, &[ptr], None, debug_loc);
    }
    bcx
}

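/// Drops a value that is held by value rather than by reference: the value is
/// first spilled to a stack slot so the (by-alias) drop glue can be applied.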
pub fn drop_ty_immediate<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                     v: ValueRef,
                                     t: Ty<'tcx>,
                                     debug_loc: DebugLoc,
                                     skip_dtor: bool)
                                     -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("drop_ty_immediate");
    let vp = alloca(bcx, type_of(bcx.ccx(), t), "");
    store_ty(bcx, v, vp, t);
    drop_ty_core(bcx, vp, t, debug_loc, skip_dtor)
}

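/// Returns (creating and caching if necessary) the drop glue function for `t`
/// along the normal path, i.e. including the type's own `Drop` impl.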
pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ValueRef {
    get_drop_glue_core(ccx, DropGlueKind::Ty(t))
}

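/// Distinguishes the two flavours of drop glue that can be generated for a
/// type: one that runs the type's own `Drop` impl and one that skips it.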
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum DropGlueKind<'tcx> {
    /// The normal path; runs the dtor, and then recurses on the contents
    Ty(Ty<'tcx>),
    /// Skips the dtor, if any, for ty; drops the contents directly.
    /// Note that the dtor is only skipped at the most *shallow*
    /// level, namely, an `impl Drop for Ty` itself. So, for example,
    /// if Ty is Newtype(S) then only the Drop impl for Newtype
    /// itself will be skipped, while the Drop impl for S, if any,
    /// will be invoked.
    TyContents(Ty<'tcx>),
}

impl<'tcx> DropGlueKind<'tcx> {
    fn ty(&self) -> Ty<'tcx> {
        match *self { DropGlueKind::Ty(t) | DropGlueKind::TyContents(t) => t }
    }

    fn map_ty<F>(&self, mut f: F) -> DropGlueKind<'tcx> where F: FnMut(Ty<'tcx>) -> Ty<'tcx>
    {
        match *self {
            DropGlueKind::Ty(t) => DropGlueKind::Ty(f(t)),
            DropGlueKind::TyContents(t) => DropGlueKind::TyContents(f(t)),
        }
    }

    fn to_string<'a>(&self, ccx: &CrateContext<'a, 'tcx>) -> String {
        let t_str = ppaux::ty_to_string(ccx.tcx(), self.ty());
        match *self {
            DropGlueKind::Ty(_) => format!("DropGlueKind::Ty({})", t_str),
            DropGlueKind::TyContents(_) => format!("DropGlueKind::TyContents({})", t_str),
        }
    }
}

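/// Looks up the drop glue function for `g`, declaring and translating it on
/// first use and caching the result in `drop_glues`. The glue always takes a
/// single pointer argument (pointing at a fat pointer when the type is
/// unsized), so callers never need to know how the value would be passed by
/// value.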
fn get_drop_glue_core<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                g: DropGlueKind<'tcx>) -> ValueRef {
    debug!("make drop glue for {}", g.to_string(ccx));
    let g = g.map_ty(|t| get_drop_glue_type(ccx, t));
    debug!("drop glue type {}", g.to_string(ccx));
    match ccx.drop_glues().borrow().get(&g) {
        Some(&glue) => return glue,
        _ => { }
    }
    let t = g.ty();

    let llty = if type_is_sized(ccx.tcx(), t) {
        type_of(ccx, t).ptr_to()
    } else {
        type_of(ccx, ty::mk_uniq(ccx.tcx(), t)).ptr_to()
    };

    let llfnty = Type::glue_fn(ccx, llty);

    // To avoid infinite recursion, don't `make_drop_glue` until after we've
    // added the entry to the `drop_glues` cache.
    if let Some(old_sym) = ccx.available_drop_glues().borrow().get(&g) {
        let llfn = declare::declare_cfn(ccx, &old_sym, llfnty, ty::mk_nil(ccx.tcx()));
        ccx.drop_glues().borrow_mut().insert(g, llfn);
        return llfn;
    };

    let fn_nm = mangle_internal_name_by_type_and_seq(ccx, t, "drop");
    let llfn = declare::define_cfn(ccx, &fn_nm, llfnty, ty::mk_nil(ccx.tcx())).unwrap_or_else(|| {
        ccx.sess().bug(&format!("symbol `{}` already defined", fn_nm));
    });
    ccx.available_drop_glues().borrow_mut().insert(g, fn_nm);

    let _s = StatRecorder::new(ccx, format!("drop {}", ty_to_short_str(ccx.tcx(), t)));

    let empty_substs = ccx.tcx().mk_substs(Substs::trans_empty());
    let (arena, fcx): (TypedArena<_>, FunctionContext);
    arena = TypedArena::new();
    fcx = new_fn_ctxt(ccx, llfn, ast::DUMMY_NODE_ID, false,
                      ty::FnConverging(ty::mk_nil(ccx.tcx())),
                      empty_substs, None, &arena);

    let bcx = init_function(&fcx, false, ty::FnConverging(ty::mk_nil(ccx.tcx())));

    update_linkage(ccx, llfn, None, OriginalTranslation);

    ccx.stats().n_glues_created.set(ccx.stats().n_glues_created.get() + 1);
    // All glue functions take values passed *by alias*; this is a
    // requirement since in many contexts glue is invoked indirectly and
    // the caller has no idea if it's dealing with something that can be
    // passed by value.
    //
    // llfn is expected to be declared to take a parameter of the appropriate
    // type, so we don't need to explicitly cast the function parameter.

    let llrawptr0 = get_param(llfn, fcx.arg_pos(0) as c_uint);
    let bcx = make_drop_glue(bcx, llrawptr0, g);
    finish_fn(&fcx, bcx, ty::FnConverging(ty::mk_nil(ccx.tcx())), DebugLoc::None);

    llfn
}

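/// Checks a struct's embedded drop flag and, when it still reads
/// `DTOR_NEEDED`, runs `trans_struct_drop` on the value. With drop-flag
/// sanity checking enabled, a flag that is neither "needed" nor "done" traps
/// via `llvm.debugtrap`, since it indicates a corrupted or uninitialized
/// flag.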
fn trans_struct_drop_flag<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                      t: Ty<'tcx>,
                                      struct_data: ValueRef,
                                      dtor_did: ast::DefId,
                                      class_did: ast::DefId,
                                      substs: &subst::Substs<'tcx>)
                                      -> Block<'blk, 'tcx> {
    assert!(type_is_sized(bcx.tcx(), t), "Precondition: caller must ensure t is sized");

    let repr = adt::represent_type(bcx.ccx(), t);
    let drop_flag = unpack_datum!(bcx, adt::trans_drop_flag_ptr(bcx, &*repr, struct_data));
    let loaded = load_ty(bcx, drop_flag.val, bcx.tcx().dtor_type());
    let drop_flag_llty = type_of(bcx.fcx.ccx, bcx.tcx().dtor_type());
    let init_val = C_integral(drop_flag_llty, adt::DTOR_NEEDED as u64, false);

    let bcx = if !bcx.ccx().check_drop_flag_for_sanity() {
        bcx
    } else {
        let drop_flag_llty = type_of(bcx.fcx.ccx, bcx.tcx().dtor_type());
        let done_val = C_integral(drop_flag_llty, adt::DTOR_DONE as u64, false);
        let not_init = ICmp(bcx, llvm::IntNE, loaded, init_val, DebugLoc::None);
        let not_done = ICmp(bcx, llvm::IntNE, loaded, done_val, DebugLoc::None);
        let drop_flag_neither_initialized_nor_cleared =
            And(bcx, not_init, not_done, DebugLoc::None);
        with_cond(bcx, drop_flag_neither_initialized_nor_cleared, |cx| {
            let llfn = cx.ccx().get_intrinsic(&("llvm.debugtrap"));
            Call(cx, llfn, &[], None, DebugLoc::None);
            cx
        })
    };

    let drop_flag_dtor_needed = ICmp(bcx, llvm::IntEQ, loaded, init_val, DebugLoc::None);
    with_cond(bcx, drop_flag_dtor_needed, |cx| {
        trans_struct_drop(cx, t, struct_data, dtor_did, class_did, substs)
    })
}

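/// Resolves the destructor function for a struct or enum: monomorphizing it
/// for generic local types, reusing the already-translated item for
/// non-generic local types, or declaring an extern reference for destructors
/// defined in another crate.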
pub fn get_res_dtor<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                              did: ast::DefId,
                              t: Ty<'tcx>,
                              parent_id: ast::DefId,
                              substs: &Substs<'tcx>)
                              -> ValueRef {
    let _icx = push_ctxt("trans_res_dtor");
    let did = inline::maybe_instantiate_inline(ccx, did);

    if !substs.types.is_empty() {
        assert_eq!(did.krate, ast::LOCAL_CRATE);

        // Since we're in trans we don't care for any region parameters
        let substs = ccx.tcx().mk_substs(Substs::erased(substs.types.clone()));

        let (val, _, _) = monomorphize::monomorphic_fn(ccx, did, substs, None);

        val
    } else if did.krate == ast::LOCAL_CRATE {
        get_item_val(ccx, did.node)
    } else {
        let tcx = ccx.tcx();
        let name = csearch::get_symbol(&ccx.sess().cstore, did);
        let class_ty = ty::lookup_item_type(tcx, parent_id).ty.subst(tcx, substs);
        let llty = type_of_dtor(ccx, class_ty);
        let dtor_ty = ty::mk_ctor_fn(ccx.tcx(),
                                     did,
                                     &[get_drop_glue_type(ccx, t)],
                                     ty::mk_nil(ccx.tcx()));
        foreign::get_extern_fn(ccx, &mut *ccx.externs().borrow_mut(), &name[..], llvm::CCallConv,
                               llty, dtor_ty)
    }
}

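/// Invokes the user-defined destructor for a struct or enum value and then
/// drops its fields. The field drops are scheduled as a cleanup so that they
/// still run if the user destructor panics.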
fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                 t: Ty<'tcx>,
                                 v0: ValueRef,
                                 dtor_did: ast::DefId,
                                 class_did: ast::DefId,
                                 substs: &subst::Substs<'tcx>)
                                 -> Block<'blk, 'tcx>
{
    debug!("trans_struct_drop t: {}", bcx.ty_to_string(t));

    // Find and call the actual destructor
    let dtor_addr = get_res_dtor(bcx.ccx(), dtor_did, t, class_did, substs);

    // Class dtors have no explicit args, so the params should
    // just consist of the environment (self).
    let params = unsafe {
        let ty = Type::from_ref(llvm::LLVMTypeOf(dtor_addr));
        ty.element_type().func_params()
    };
    assert_eq!(params.len(), 1);

    // Be sure to put the contents into a scope so we can use an invoke
    // instruction to call the user destructor but still call the field
    // destructors if the user destructor panics.
    //
    // FIXME (#14875) panic-in-drop semantics might be unsupported; we
    // might well consider changing below to more direct code.
    let contents_scope = bcx.fcx.push_custom_cleanup_scope();

    // Issue #23611: schedule cleanup of contents, re-inspecting the
    // discriminant (if any) in case of variant swap in drop code.
    bcx.fcx.schedule_drop_adt_contents(cleanup::CustomScope(contents_scope), v0, t);

    let glue_type = get_drop_glue_type(bcx.ccx(), t);
    let dtor_ty = ty::mk_ctor_fn(bcx.tcx(), class_did, &[glue_type], ty::mk_nil(bcx.tcx()));
    let (_, bcx) = invoke(bcx, dtor_addr, &[v0], dtor_ty, DebugLoc::None);

    bcx.fcx.pop_and_trans_custom_cleanup_scope(bcx, contents_scope)
}

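/// Computes the size and alignment of a dynamically sized value of type `t`,
/// given the extra word (`info`) carried by its fat pointer: a vtable pointer
/// for trait objects, or an element count for slices and `str`. Sized types
/// fall back to their statically known size and alignment.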
pub fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info: ValueRef)
                                         -> (ValueRef, ValueRef) {
    debug!("calculate size of DST: {}; with lost info: {}",
           bcx.ty_to_string(t), bcx.val_to_string(info));
    if type_is_sized(bcx.tcx(), t) {
        let sizing_type = sizing_type_of(bcx.ccx(), t);
        let size = C_uint(bcx.ccx(), llsize_of_alloc(bcx.ccx(), sizing_type));
        let align = C_uint(bcx.ccx(), align_of(bcx.ccx(), t));
        return (size, align);
    }
    match t.sty {
        ty::ty_struct(id, substs) => {
            let ccx = bcx.ccx();
            // First get the size of all statically known fields.
            // Don't use type_of::sizing_type_of because that expects t to be sized.
            assert!(!ty::type_is_simd(bcx.tcx(), t));
            let repr = adt::represent_type(ccx, t);
            let sizing_type = adt::sizing_type_of(ccx, &*repr, true);
            let sized_size = C_uint(ccx, llsize_of_alloc(ccx, sizing_type));
            let sized_align = C_uint(ccx, llalign_of_min(ccx, sizing_type));

            // Recurse to get the size of the dynamically sized field (must be
            // the last field).
            let fields = ty::struct_fields(bcx.tcx(), id, substs);
            let last_field = fields[fields.len()-1];
            let field_ty = last_field.mt.ty;
            let (unsized_size, unsized_align) = size_and_align_of_dst(bcx, field_ty, info);

            // Return the sum of sizes and max of aligns.
            let size = Add(bcx, sized_size, unsized_size, DebugLoc::None);
            let align = Select(bcx,
                               ICmp(bcx,
                                    llvm::IntULT,
                                    sized_align,
                                    unsized_align,
                                    DebugLoc::None),
                               sized_align,
                               unsized_align);
            (size, align)
        }
        ty::ty_trait(..) => {
            // info points to the vtable and the second entry in the vtable is the
            // dynamic size of the object.
            let info = PointerCast(bcx, info, Type::int(bcx.ccx()).ptr_to());
            let size_ptr = GEPi(bcx, info, &[1]);
            let align_ptr = GEPi(bcx, info, &[2]);
            (Load(bcx, size_ptr), Load(bcx, align_ptr))
        }
        ty::ty_vec(_, None) | ty::ty_str => {
            let unit_ty = ty::sequence_element_type(bcx.tcx(), t);
            // The info in this case is the length of the str, so the size is that
            // times the unit size.
            let llunit_ty = sizing_type_of(bcx.ccx(), unit_ty);
            let unit_align = llalign_of_min(bcx.ccx(), llunit_ty);
            let unit_size = llsize_of_alloc(bcx.ccx(), llunit_ty);
            (Mul(bcx, info, C_uint(bcx.ccx(), unit_size), DebugLoc::None),
             C_uint(bcx.ccx(), unit_align))
        }
        _ => bcx.sess().bug(&format!("Unexpected unsized type, found {}",
                                     bcx.ty_to_string(t)))
    }
}

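/// Emits the body of a drop glue function for `g`: frees `Box` allocations
/// (after dropping their contents, and skipping boxes whose pointer already
/// holds the "dropped" bit-pattern), dispatches through the vtable for trait
/// objects, checks drop flags and user destructors for structs and enums, and
/// otherwise just drops each component of the value.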
fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, g: DropGlueKind<'tcx>)
                              -> Block<'blk, 'tcx> {
    let t = g.ty();
    let skip_dtor = match g { DropGlueKind::Ty(_) => false, DropGlueKind::TyContents(_) => true };
    // NB: v0 is an *alias* of type t here, not a direct value.
    let _icx = push_ctxt("make_drop_glue");

    // Only drop the value when it ... well, we used to check for
    // non-null (and maybe we need to continue doing so), but we now
    // must definitely check for special bit-patterns corresponding to
    // the special dtor markings.

    let inttype = Type::int(bcx.ccx());
    let dropped_pattern = C_integral(inttype, adt::dtor_done_usize(bcx.fcx.ccx) as u64, false);

    match t.sty {
        ty::ty_uniq(content_ty) => {
            // Support for ty_uniq is built-in and its drop glue is
            // special. It may move to library and have Drop impl. As
            // a safe-guard, assert ty_uniq not used with TyContents.
            assert!(!skip_dtor);
            if !type_is_sized(bcx.tcx(), content_ty) {
                let llval = GEPi(bcx, v0, &[0, abi::FAT_PTR_ADDR]);
                let llbox = Load(bcx, llval);
                let llbox_as_usize = PtrToInt(bcx, llbox, Type::int(bcx.ccx()));
                let drop_flag_not_dropped_already =
                    ICmp(bcx, llvm::IntNE, llbox_as_usize, dropped_pattern, DebugLoc::None);
                with_cond(bcx, drop_flag_not_dropped_already, |bcx| {
                    let bcx = drop_ty(bcx, v0, content_ty, DebugLoc::None);
                    let info = GEPi(bcx, v0, &[0, abi::FAT_PTR_EXTRA]);
                    let info = Load(bcx, info);
                    let (llsize, llalign) = size_and_align_of_dst(bcx, content_ty, info);

                    // `Box<ZeroSizeType>` does not allocate.
                    let needs_free = ICmp(bcx,
                                          llvm::IntNE,
                                          llsize,
                                          C_uint(bcx.ccx(), 0u64),
                                          DebugLoc::None);
                    with_cond(bcx, needs_free, |bcx| {
                        trans_exchange_free_dyn(bcx, llbox, llsize, llalign, DebugLoc::None)
                    })
                })
            } else {
                let llval = v0;
                let llbox = Load(bcx, llval);
                let llbox_as_usize = PtrToInt(bcx, llbox, inttype);
                let drop_flag_not_dropped_already =
                    ICmp(bcx, llvm::IntNE, llbox_as_usize, dropped_pattern, DebugLoc::None);
                with_cond(bcx, drop_flag_not_dropped_already, |bcx| {
                    let bcx = drop_ty(bcx, llbox, content_ty, DebugLoc::None);
                    trans_exchange_free_ty(bcx, llbox, content_ty, DebugLoc::None)
                })
            }
        }
        ty::ty_struct(did, substs) | ty::ty_enum(did, substs) => {
            let tcx = bcx.tcx();
            match (ty::ty_dtor(tcx, did), skip_dtor) {
                (ty::TraitDtor(dtor, true), false) => {
                    // FIXME(16758) Since the struct is unsized, it is hard to
                    // find the drop flag (which is at the end of the struct).
                    // Let's just ignore the flag and pretend everything will be
                    // OK.
                    if type_is_sized(bcx.tcx(), t) {
                        trans_struct_drop_flag(bcx, t, v0, dtor, did, substs)
                    } else {
                        // Give the user a heads up that we are doing something
                        // stupid and dangerous.
                        bcx.sess().warn(&format!("Ignoring drop flag in destructor for {} \
                                                  because the struct is unsized. See issue \
                                                  #16758",
                                                 bcx.ty_to_string(t)));
                        trans_struct_drop(bcx, t, v0, dtor, did, substs)
                    }
                }
                (ty::TraitDtor(dtor, false), false) => {
                    trans_struct_drop(bcx, t, v0, dtor, did, substs)
                }
                (ty::NoDtor, _) | (_, true) => {
                    // No dtor? Just the default case
                    iter_structural_ty(bcx, v0, t, |bb, vv, tt| drop_ty(bb, vv, tt, DebugLoc::None))
                }
            }
        }
        ty::ty_trait(..) => {
            // No support in vtable for distinguishing destroying with
            // versus without calling Drop::drop. Assert caller is
            // okay with always calling the Drop impl, if any.
            assert!(!skip_dtor);
            let data_ptr = GEPi(bcx, v0, &[0, abi::FAT_PTR_ADDR]);
            let vtable_ptr = Load(bcx, GEPi(bcx, v0, &[0, abi::FAT_PTR_EXTRA]));
            let dtor = Load(bcx, vtable_ptr);
            Call(bcx,
                 dtor,
                 &[PointerCast(bcx, Load(bcx, data_ptr), Type::i8p(bcx.ccx()))],
                 None,
                 DebugLoc::None);
            bcx
        }
        _ => {
            if bcx.fcx.type_needs_drop(t) {
                iter_structural_ty(bcx,
                                   v0,
                                   t,
                                   |bb, vv, tt| drop_ty(bb, vv, tt, DebugLoc::None))
            } else {
                bcx
            }
        }
    }
}