// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// Code relating to drop glue.
use std;

use back::symbol_names;
use llvm;
use llvm::{ValueRef, get_param};
use middle::lang_items::ExchangeFreeFnLangItem;
use rustc::ty::subst::{Substs};
use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt};
use abi::{Abi, FnType};
use adt;
use adt::GetDtorType; // for tcx.dtor_type()
use base::*;
use build::*;
use callee::{Callee, ArgVals};
use cleanup;
use cleanup::CleanupMethods;
use collector::{self, TransItem};
use common::*;
use debuginfo::DebugLoc;
use declare;
use expr;
use machine::*;
use monomorphize;
use type_of::{type_of, sizing_type_of, align_of};
use type_::Type;
use value::Value;

use arena::TypedArena;
use syntax::codemap::DUMMY_SP;
pub fn trans_exchange_free_dyn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                           v: ValueRef,
                                           size: ValueRef,
                                           align: ValueRef,
                                           debug_loc: DebugLoc)
                                           -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("trans_exchange_free");

    let def_id = langcall(bcx, None, "", ExchangeFreeFnLangItem);
    let args = [PointerCast(bcx, v, Type::i8p(bcx.ccx())), size, align];
    Callee::def(bcx.ccx(), def_id, bcx.tcx().mk_substs(Substs::empty()))
        .call(bcx, debug_loc, ArgVals(&args), None).bcx
}
pub fn trans_exchange_free<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                                       v: ValueRef,
                                       size: u64,
                                       align: u32,
                                       debug_loc: DebugLoc)
                                       -> Block<'blk, 'tcx> {
    trans_exchange_free_dyn(cx,
                            v,
                            C_uint(cx.ccx(), size),
                            C_uint(cx.ccx(), align),
                            debug_loc)
}
pub fn trans_exchange_free_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                          ptr: ValueRef,
                                          content_ty: Ty<'tcx>,
                                          debug_loc: DebugLoc)
                                          -> Block<'blk, 'tcx> {
    assert!(type_is_sized(bcx.ccx().tcx(), content_ty));
    let sizing_type = sizing_type_of(bcx.ccx(), content_ty);
    let content_size = llsize_of_alloc(bcx.ccx(), sizing_type);

    // `Box<ZeroSizeType>` does not allocate.
    if content_size != 0 {
        let content_align = align_of(bcx.ccx(), content_ty);
        trans_exchange_free(bcx, ptr, content_size, content_align, debug_loc)
    } else {
        bcx
    }
}
pub fn type_needs_drop<'tcx>(tcx: &TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool {
    tcx.type_needs_drop_given_env(ty, &tcx.empty_parameter_environment())
}
pub fn get_drop_glue_type<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                    t: Ty<'tcx>) -> Ty<'tcx> {
    let tcx = ccx.tcx();
    // Even if there is no dtor for t, there might be one deeper down and we
    // might need to pass in the vtable ptr.
    if !type_is_sized(tcx, t) {
        return t
    }

    // FIXME (#22815): note that type_needs_drop conservatively
    // approximates in some cases and may say a type expression
    // requires drop glue when it actually does not.
    //
    // (In this case it is not clear whether any harm is done, i.e.
    // erroneously returning `t` in some cases where we could have
    // returned `tcx.types.i8` does not appear unsound. The impact on
    // code quality is unknown at this time.)
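    //
    // For example, `Box<()>`: the contents need no drop and are zero-sized
    // (so there is nothing to free), and the `TyBox` arm below therefore
    // collapses its drop-glue type to `tcx.types.i8` as well.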

    if !type_needs_drop(&tcx, t) {
        return tcx.types.i8;
    }
    match t.sty {
        ty::TyBox(typ) if !type_needs_drop(&tcx, typ)
                         && type_is_sized(tcx, typ) => {
            let llty = sizing_type_of(ccx, typ);
            // `Box<ZeroSizeType>` does not allocate.
            if llsize_of_alloc(ccx, llty) == 0 {
                tcx.types.i8
            } else {
                t
            }
        }
        _ => t
    }
}
pub fn drop_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                           v: ValueRef,
                           t: Ty<'tcx>,
                           debug_loc: DebugLoc) -> Block<'blk, 'tcx> {
    drop_ty_core(bcx, v, t, debug_loc, false, None)
}
pub fn drop_ty_core<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                v: ValueRef,
                                t: Ty<'tcx>,
                                debug_loc: DebugLoc,
                                skip_dtor: bool,
                                drop_hint: Option<cleanup::DropHintValue>)
                                -> Block<'blk, 'tcx> {
    // NB: v is an *alias* of type t here, not a direct value.
    debug!("drop_ty_core(t={:?}, skip_dtor={} drop_hint={:?})", t, skip_dtor, drop_hint);
    let _icx = push_ctxt("drop_ty");
    let mut bcx = bcx;
    if bcx.fcx.type_needs_drop(t) {
        let ccx = bcx.ccx();
        let g = if skip_dtor {
            DropGlueKind::TyContents(t)
        } else {
            DropGlueKind::Ty(t)
        };
        let glue = get_drop_glue_core(ccx, g);
        let glue_type = get_drop_glue_type(ccx, t);
        let ptr = if glue_type != t {
            PointerCast(bcx, v, type_of(ccx, glue_type).ptr_to())
        } else {
            v
        };
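
        // With a drop hint, the hint byte is compared against
        // adt::DTOR_MOVED_HINT: only if the value has not been moved away do
        // we call the glue, and the call is wrapped in a conditional block.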
        match drop_hint {
            Some(drop_hint) => {
                let hint_val = load_ty(bcx, drop_hint.value(), bcx.tcx().types.u8);
                let moved_val =
                    C_integral(Type::i8(bcx.ccx()), adt::DTOR_MOVED_HINT as u64, false);
                let may_need_drop =
                    ICmp(bcx, llvm::IntNE, hint_val, moved_val, DebugLoc::None);
                bcx = with_cond(bcx, may_need_drop, |cx| {
                    Call(cx, glue, &[ptr], debug_loc);
                    cx
                })
            }
            None => {
                // No drop-hint ==> call standard drop glue
                Call(bcx, glue, &[ptr], debug_loc);
            }
        }
    }
    bcx
}
pub fn drop_ty_immediate<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                     v: ValueRef,
                                     t: Ty<'tcx>,
                                     debug_loc: DebugLoc,
                                     skip_dtor: bool)
                                     -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("drop_ty_immediate");
    let vp = alloc_ty(bcx, t, "");
    call_lifetime_start(bcx, vp);
    store_ty(bcx, v, vp, t);
    let bcx = drop_ty_core(bcx, vp, t, debug_loc, skip_dtor, None);
    call_lifetime_end(bcx, vp);
    bcx
}
pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ValueRef {
    get_drop_glue_core(ccx, DropGlueKind::Ty(t))
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum DropGlueKind<'tcx> {
    /// The normal path; runs the dtor, and then recurs on the contents
    Ty(Ty<'tcx>),
    /// Skips the dtor, if any, for ty; drops the contents directly.
    /// Note that the dtor is only skipped at the most *shallow*
    /// level, namely, an `impl Drop for Ty` itself. So, for example,
    /// if Ty is Newtype(S) then only the Drop impl for Newtype itself
    /// will be skipped, while the Drop impl for S, if any, will be
    /// invoked.
    TyContents(Ty<'tcx>),
}
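
// `drop_ty_core` requests `TyContents` when its `skip_dtor` flag is set, so
// only the outermost `Drop` impl is suppressed; the contents still have their
// destructors run by the glue generated below.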
impl<'tcx> DropGlueKind<'tcx> {
    fn ty(&self) -> Ty<'tcx> {
        match *self { DropGlueKind::Ty(t) | DropGlueKind::TyContents(t) => t }
    }

    fn map_ty<F>(&self, mut f: F) -> DropGlueKind<'tcx> where F: FnMut(Ty<'tcx>) -> Ty<'tcx>
    {
        match *self {
            DropGlueKind::Ty(t) => DropGlueKind::Ty(f(t)),
            DropGlueKind::TyContents(t) => DropGlueKind::TyContents(f(t)),
        }
    }
}
fn get_drop_glue_core<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                g: DropGlueKind<'tcx>) -> ValueRef {
    debug!("make drop glue for {:?}", g);
    let g = g.map_ty(|t| get_drop_glue_type(ccx, t));
    debug!("drop glue type {:?}", g);
    match ccx.drop_glues().borrow().get(&g) {
        Some(&glue) => return glue,
        _ => { }
    }
    let t = g.ty();

    let tcx = ccx.tcx();
    let sig = ty::FnSig {
        inputs: vec![tcx.mk_mut_ptr(tcx.types.i8)],
        output: ty::FnOutput::FnConverging(tcx.mk_nil()),
        variadic: false,
    };
    // Create a FnType for fn(*mut i8) and substitute the real type in
    // later - that prevents FnType from splitting fat pointers up.
    let mut fn_ty = FnType::new(ccx, Abi::Rust, &sig, &[]);
    fn_ty.args[0].original_ty = type_of(ccx, t).ptr_to();
    let llfnty = fn_ty.llvm_type(ccx);

    // To avoid infinite recursion, don't `make_drop_glue` until after we've
    // added the entry to the `drop_glues` cache.
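    // If a symbol for this glue has already been generated, just redeclare it
    // under that name and cache the declaration instead of building a new body.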
    if let Some(old_sym) = ccx.available_drop_glues().borrow().get(&g) {
        let llfn = declare::declare_cfn(ccx, &old_sym, llfnty);
        ccx.drop_glues().borrow_mut().insert(g, llfn);
        return llfn;
    };

    let suffix = match g {
        DropGlueKind::Ty(_) => "drop",
        DropGlueKind::TyContents(_) => "drop_contents",
    };

    let fn_nm = symbol_names::internal_name_from_type_and_suffix(ccx, t, suffix);
    assert!(declare::get_defined_value(ccx, &fn_nm).is_none());
    let llfn = declare::declare_cfn(ccx, &fn_nm, llfnty);
    ccx.available_drop_glues().borrow_mut().insert(g, fn_nm);
    ccx.drop_glues().borrow_mut().insert(g, llfn);

    let _s = StatRecorder::new(ccx, format!("drop {:?}", t));

    let empty_substs = ccx.tcx().mk_substs(Substs::empty());
    let (arena, fcx): (TypedArena<_>, FunctionContext);
    arena = TypedArena::new();
    fcx = FunctionContext::new(ccx, llfn, fn_ty, None, empty_substs, &arena);

    let bcx = fcx.init(false, None);

    update_linkage(ccx, llfn, None, OriginalTranslation);

    ccx.stats().n_glues_created.set(ccx.stats().n_glues_created.get() + 1);
    // All glue functions take values passed *by alias*; this is a
    // requirement since in many contexts glue is invoked indirectly and
    // the caller has no idea if it's dealing with something that can be
    // passed by value.
    //
    // llfn is expected to be declared to take a parameter of the appropriate
    // type, so we don't need to explicitly cast the function parameter.

    let bcx = make_drop_glue(bcx, get_param(llfn, 0), g);
    fcx.finish(bcx, DebugLoc::None);

    llfn
}
fn trans_struct_drop_flag<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                                      t: Ty<'tcx>,
                                      struct_data: ValueRef)
                                      -> Block<'blk, 'tcx> {
    assert!(type_is_sized(bcx.tcx(), t), "Precondition: caller must ensure t is sized");

    let repr = adt::represent_type(bcx.ccx(), t);
    let drop_flag = unpack_datum!(bcx, adt::trans_drop_flag_ptr(bcx, &repr, struct_data));
    let loaded = load_ty(bcx, drop_flag.val, bcx.tcx().dtor_type());
    let drop_flag_llty = type_of(bcx.fcx.ccx, bcx.tcx().dtor_type());
    let init_val = C_integral(drop_flag_llty, adt::DTOR_NEEDED as u64, false);
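
    // When drop-flag sanity checking is enabled, a flag that is neither
    // DTOR_NEEDED nor DTOR_DONE indicates memory corruption or a codegen bug,
    // so we trap via llvm.debugtrap before attempting any drop.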
    let bcx = if !bcx.ccx().check_drop_flag_for_sanity() {
        bcx
    } else {
        let drop_flag_llty = type_of(bcx.fcx.ccx, bcx.tcx().dtor_type());
        let done_val = C_integral(drop_flag_llty, adt::DTOR_DONE as u64, false);
        let not_init = ICmp(bcx, llvm::IntNE, loaded, init_val, DebugLoc::None);
        let not_done = ICmp(bcx, llvm::IntNE, loaded, done_val, DebugLoc::None);
        let drop_flag_neither_initialized_nor_cleared =
            And(bcx, not_init, not_done, DebugLoc::None);
        with_cond(bcx, drop_flag_neither_initialized_nor_cleared, |cx| {
            let llfn = cx.ccx().get_intrinsic(&("llvm.debugtrap"));
            Call(cx, llfn, &[], DebugLoc::None);
            cx
        })
    };

    let drop_flag_dtor_needed = ICmp(bcx, llvm::IntEQ, loaded, init_val, DebugLoc::None);
    with_cond(bcx, drop_flag_dtor_needed, |cx| {
        trans_struct_drop(cx, t, struct_data)
    })
}
fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                 t: Ty<'tcx>,
                                 v0: ValueRef)
                                 -> Block<'blk, 'tcx>
{
    debug!("trans_struct_drop t: {}", t);
    let tcx = bcx.tcx();
    let mut bcx = bcx;

    let def = t.ty_adt_def().unwrap();

    // Be sure to put the contents into a scope so we can use an invoke
    // instruction to call the user destructor but still call the field
    // destructors if the user destructor panics.
    //
    // FIXME (#14875) panic-in-drop semantics might be unsupported; we
    // might well consider changing below to more direct code.
    let contents_scope = bcx.fcx.push_custom_cleanup_scope();

    // Issue #23611: schedule cleanup of contents, re-inspecting the
    // discriminant (if any) in case of variant swap in drop code.
    bcx.fcx.schedule_drop_adt_contents(cleanup::CustomScope(contents_scope), v0, t);
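
    // The destructor takes `self` by reference: a sized value is passed as
    // `v0` directly, while an unsized one is split below into the data and
    // metadata halves of its fat pointer.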
    let (sized_args, unsized_args);
    let args: &[ValueRef] = if type_is_sized(tcx, t) {
        sized_args = [v0];
        &sized_args
    } else {
        unsized_args = [Load(bcx, expr::get_dataptr(bcx, v0)), Load(bcx, expr::get_meta(bcx, v0))];
        &unsized_args
    };

    let trait_ref = ty::Binder(ty::TraitRef {
        def_id: tcx.lang_items.drop_trait().unwrap(),
        substs: tcx.mk_substs(Substs::empty().with_self_ty(t))
    });
    let vtbl = match fulfill_obligation(bcx.ccx(), DUMMY_SP, trait_ref) {
        traits::VtableImpl(data) => data,
        _ => bug!("dtor for {:?} is not an impl???", t)
    };
    let dtor_did = def.destructor().unwrap();
    bcx = Callee::def(bcx.ccx(), dtor_did, vtbl.substs)
        .call(bcx, DebugLoc::None, ArgVals(args), None).bcx;

    bcx.fcx.pop_and_trans_custom_cleanup_scope(bcx, contents_scope)
}
pub fn size_and_align_of_dst<'blk, 'tcx>(bcx: &BlockAndBuilder<'blk, 'tcx>,
                                         t: Ty<'tcx>, info: ValueRef)
                                         -> (ValueRef, ValueRef) {
    debug!("calculate size of DST: {}; with lost info: {:?}",
           t, Value(info));
    if type_is_sized(bcx.tcx(), t) {
        let sizing_type = sizing_type_of(bcx.ccx(), t);
        let size = llsize_of_alloc(bcx.ccx(), sizing_type);
        let align = align_of(bcx.ccx(), t);
        debug!("size_and_align_of_dst t={} info={:?} size: {} align: {}",
               t, Value(info), size, align);
        let size = C_uint(bcx.ccx(), size);
        let align = C_uint(bcx.ccx(), align);
        return (size, align);
    }
    if bcx.is_unreachable() {
        let llty = Type::int(bcx.ccx());
        return (C_undef(llty), C_undef(llty));
    }
    match t.sty {
        ty::TyStruct(def, substs) => {
            let ccx = bcx.ccx();
            // First get the size of all statically known fields.
            // Don't use type_of::sizing_type_of because that expects t to be sized.
            assert!(!t.is_simd());
            let repr = adt::represent_type(ccx, t);
            let sizing_type = adt::sizing_type_context_of(ccx, &repr, true);
            debug!("DST {} sizing_type: {:?}", t, sizing_type);
            let sized_size = llsize_of_alloc(ccx, sizing_type.prefix());
            let sized_align = llalign_of_min(ccx, sizing_type.prefix());
            debug!("DST {} statically sized prefix size: {} align: {}",
                   t, sized_size, sized_align);
            let sized_size = C_uint(ccx, sized_size);
            let sized_align = C_uint(ccx, sized_align);

            // Recurse to get the size of the dynamically sized field (must be
            // the last field).
            let last_field = def.struct_variant().fields.last().unwrap();
            let field_ty = monomorphize::field_ty(bcx.tcx(), substs, last_field);
            let (unsized_size, unsized_align) = size_and_align_of_dst(bcx, field_ty, info);

            // FIXME (#26403, #27023): We should be adding padding
            // to `sized_size` (to accommodate the `unsized_align`
            // required of the unsized field that follows) before
            // summing it with `sized_size`. (Note that since #26403
            // is unfixed, we do not yet add the necessary padding
            // here. But this is where the add would go.)

            // Return the sum of sizes and max of aligns.
            let mut size = bcx.add(sized_size, unsized_size);

            // Issue #27023: If there is a drop flag, *now* we add 1
            // to the size. (We can do this without adding any
            // padding because drop flags do not have any alignment
            // constraints.)
            if sizing_type.needs_drop_flag() {
                size = bcx.add(size, C_uint(bcx.ccx(), 1_u64));
            }

            // Choose max of two known alignments (combined value must
            // be aligned according to more restrictive of the two).
            let align = match (const_to_opt_uint(sized_align), const_to_opt_uint(unsized_align)) {
                (Some(sized_align), Some(unsized_align)) => {
                    // If both alignments are constant (the sized_align should always be), then
                    // pick the correct alignment statically.
                    C_uint(ccx, std::cmp::max(sized_align, unsized_align))
                }
                _ => bcx.select(bcx.icmp(llvm::IntUGT, sized_align, unsized_align),
                                sized_align,
                                unsized_align)
            };

            // Issue #27023: must add any necessary padding to `size`
            // (to make it a multiple of `align`) before returning it.
            //
            // Namely, the returned size should be, in C notation:
            //
            //   `size + ((size & (align-1)) ? align : 0)`
            //
            // emulated via the semi-standard fast bit trick:
            //
            //   `(size + (align-1)) & -align`
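            //
            // For example, with size = 6 and align = 4:
            //   (6 + 3) & -4  ==  9 & !3  ==  8,
            // i.e. 6 rounded up to the next multiple of 4.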

            let addend = bcx.sub(align, C_uint(bcx.ccx(), 1_u64));
            let size = bcx.and(bcx.add(size, addend), bcx.neg(align));

            (size, align)
        }
        ty::TyTrait(..) => {
            // info points to the vtable and the second entry in the vtable is the
            // dynamic size of the object.
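            // (The third entry is its alignment, which is why indices 1 and 2
            // are loaded below; slot 0 holds the destructor.)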
            let info = bcx.pointercast(info, Type::int(bcx.ccx()).ptr_to());
            let size_ptr = bcx.gepi(info, &[1]);
            let align_ptr = bcx.gepi(info, &[2]);
            (bcx.load(size_ptr), bcx.load(align_ptr))
        }
        ty::TySlice(_) | ty::TyStr => {
            let unit_ty = t.sequence_element_type(bcx.tcx());
            // The info in this case is the length of the str, so the size is that
            // times the unit size.
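            // For example, for a `[u16]` of length 10 the computed size is
            // 10 * 2 = 20 bytes with an alignment of 2.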
            let llunit_ty = sizing_type_of(bcx.ccx(), unit_ty);
            let unit_align = llalign_of_min(bcx.ccx(), llunit_ty);
            let unit_size = llsize_of_alloc(bcx.ccx(), llunit_ty);
            (bcx.mul(info, C_uint(bcx.ccx(), unit_size)),
             C_uint(bcx.ccx(), unit_align))
        }
        _ => bug!("Unexpected unsized type, found {}", t)
    }
}
fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, g: DropGlueKind<'tcx>)
                              -> Block<'blk, 'tcx> {
    let t = g.ty();

    if collector::collecting_debug_information(bcx.ccx()) {
        bcx.ccx()
           .record_translation_item_as_generated(TransItem::DropGlue(bcx.tcx()
                                                                        .erase_regions(&t)));
    }

    let skip_dtor = match g { DropGlueKind::Ty(_) => false, DropGlueKind::TyContents(_) => true };
    // NB: v0 is an *alias* of type t here, not a direct value.
    let _icx = push_ctxt("make_drop_glue");

    // Only drop the value when it ... well, we used to check for
    // non-null, (and maybe we need to continue doing so), but we now
    // must definitely check for special bit-patterns corresponding to
    // the special dtor markings.

    let inttype = Type::int(bcx.ccx());
    let dropped_pattern = C_integral(inttype, adt::DTOR_DONE_U64, false);
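
    // A box pointer equal to `dropped_pattern` (the DTOR_DONE fill value)
    // marks a value that has already been dropped, so the `TyBox` arm below
    // only frees boxes whose pointer differs from that pattern.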
    match t.sty {
        ty::TyBox(content_ty) => {
            // Support for TyBox is built-in and its drop glue is
            // special. It may move to library and have Drop impl. As
            // a safe-guard, assert TyBox not used with TyContents.
            assert!(!skip_dtor);
            if !type_is_sized(bcx.tcx(), content_ty) {
                let llval = expr::get_dataptr(bcx, v0);
                let llbox = Load(bcx, llval);
                let llbox_as_usize = PtrToInt(bcx, llbox, Type::int(bcx.ccx()));
                let drop_flag_not_dropped_already =
                    ICmp(bcx, llvm::IntNE, llbox_as_usize, dropped_pattern, DebugLoc::None);
                with_cond(bcx, drop_flag_not_dropped_already, |bcx| {
                    let bcx = drop_ty(bcx, v0, content_ty, DebugLoc::None);
                    let info = expr::get_meta(bcx, v0);
                    let info = Load(bcx, info);
                    let (llsize, llalign) =
                        size_and_align_of_dst(&bcx.build(), content_ty, info);

                    // `Box<ZeroSizeType>` does not allocate.
                    let needs_free = ICmp(bcx,
                                          llvm::IntNE,
                                          llsize,
                                          C_uint(bcx.ccx(), 0u64),
                                          DebugLoc::None);
                    with_cond(bcx, needs_free, |bcx| {
                        trans_exchange_free_dyn(bcx, llbox, llsize, llalign, DebugLoc::None)
                    })
                })
            } else {
                let llval = v0;
                let llbox = Load(bcx, llval);
                let llbox_as_usize = PtrToInt(bcx, llbox, inttype);
                let drop_flag_not_dropped_already =
                    ICmp(bcx, llvm::IntNE, llbox_as_usize, dropped_pattern, DebugLoc::None);
                with_cond(bcx, drop_flag_not_dropped_already, |bcx| {
                    let bcx = drop_ty(bcx, llbox, content_ty, DebugLoc::None);
                    trans_exchange_free_ty(bcx, llbox, content_ty, DebugLoc::None)
                })
            }
        }
        ty::TyStruct(def, _) | ty::TyEnum(def, _) => {
            match (def.dtor_kind(), skip_dtor) {
                (ty::TraitDtor(true), false) => {
                    // FIXME(16758) Since the struct is unsized, it is hard to
                    // find the drop flag (which is at the end of the struct).
                    // Let's just ignore the flag and pretend everything will be
                    // ok.
                    if type_is_sized(bcx.tcx(), t) {
                        trans_struct_drop_flag(bcx, t, v0)
                    } else {
                        // Give the user a heads up that we are doing something
                        // stupid and dangerous.
                        bcx.sess().warn(&format!("Ignoring drop flag in destructor for {} \
                                                  because the struct is unsized. See issue \
                                                  #16758", t));
                        trans_struct_drop(bcx, t, v0)
                    }
                }
                (ty::TraitDtor(false), false) => {
                    trans_struct_drop(bcx, t, v0)
                }
                (ty::NoDtor, _) | (_, true) => {
                    // No dtor? Just the default case
                    iter_structural_ty(bcx, v0, t, |bb, vv, tt| drop_ty(bb, vv, tt, DebugLoc::None))
                }
            }
        }
        ty::TyTrait(..) => {
            // No support in vtable for distinguishing destroying with
            // versus without calling Drop::drop. Assert caller is
            // okay with always calling the Drop impl, if any.
            assert!(!skip_dtor);
            let data_ptr = expr::get_dataptr(bcx, v0);
            let vtable_ptr = Load(bcx, expr::get_meta(bcx, v0));
            let dtor = Load(bcx, vtable_ptr);
            Call(bcx,
                 dtor,
                 &[PointerCast(bcx, Load(bcx, data_ptr), Type::i8p(bcx.ccx()))],
                 DebugLoc::None);
            bcx
        }
        _ => {
            if bcx.fcx.type_needs_drop(t) {
                iter_structural_ty(bcx,
                                   v0,
                                   t,
                                   |bb, vv, tt| drop_ty(bb, vv, tt, DebugLoc::None))
            } else {
                bcx
            }
        }
    }
}