// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//
// Code relating to drop glue.
19 use llvm
::{ValueRef, get_param}
;
20 use metadata
::csearch
;
21 use middle
::lang_items
::ExchangeFreeFnLangItem
;
23 use middle
::subst
::{Subst, Substs}
;
24 use middle
::ty
::{self, Ty}
;
26 use trans
::adt
::GetDtorType
; // for tcx.dtor_type()
31 use trans
::cleanup
::CleanupMethods
;
33 use trans
::debuginfo
::DebugLoc
;
38 use trans
::machine
::*;
39 use trans
::monomorphize
;
40 use trans
::type_of
::{type_of, type_of_dtor, sizing_type_of, align_of}
;
41 use trans
::type_
::Type
;
43 use util
::ppaux
::{ty_to_short_str, Repr}
;
45 use arena
::TypedArena
;
// Emits a call to the exchange-free lang item (`ExchangeFreeFnLangItem`) to
// deallocate the heap pointer `v`, casting it to `i8*` and passing the
// dynamically computed `size` and `align` operands.
// NOTE(review): this extraction is missing source lines (gaps in the fused
// original line numbers — e.g. the parameter list and closing lines); code
// is left byte-identical and only comments were added.
49 pub fn trans_exchange_free_dyn
<'blk
, 'tcx
>(cx
: Block
<'blk
, 'tcx
>,
// (missing lines presumably declared `v`, `size`, `align`, `debug_loc` here
//  — TODO confirm against the original file)
54 -> Block
<'blk
, 'tcx
> {
55 let _icx
= push_ctxt("trans_exchange_free");
57 callee
::trans_lang_call(cx
,
58 langcall(cx
, None
, "", ExchangeFreeFnLangItem
),
59 &[PointerCast(cx
, v
, Type
::i8p(ccx
)), size
, align
],
// Convenience wrapper over `trans_exchange_free_dyn` for the common case of
// statically known size and alignment: both are materialized as constant
// unsigned ints (`C_uint`) before delegating.
// NOTE(review): extraction gaps — parameters and the trailing argument lines
// are missing from view; code left byte-identical.
64 pub fn trans_exchange_free
<'blk
, 'tcx
>(cx
: Block
<'blk
, 'tcx
>,
69 -> Block
<'blk
, 'tcx
> {
70 trans_exchange_free_dyn(cx
,
72 C_uint(cx
.ccx(), size
),
73 C_uint(cx
.ccx(), align
),
// Frees a heap allocation whose contents have type `content_ty`, which must
// be sized (asserted below). Computes the content's allocation size from its
// sizing LLVM type; when that size is zero the free is skipped entirely,
// because `Box<ZeroSizeType>` never allocated in the first place.
// NOTE(review): extraction gaps — the `ptr`/`debug_loc` parameter lines and
// the else/closing lines are missing from view; code left byte-identical.
77 pub fn trans_exchange_free_ty
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
81 -> Block
<'blk
, 'tcx
> {
82 assert
!(type_is_sized(bcx
.ccx().tcx(), content_ty
));
83 let sizing_type
= sizing_type_of(bcx
.ccx(), content_ty
);
84 let content_size
= llsize_of_alloc(bcx
.ccx(), sizing_type
);
86 // `Box<ZeroSizeType>` does not allocate.
87 if content_size
!= 0 {
88 let content_align
= align_of(bcx
.ccx(), content_ty
);
89 trans_exchange_free(bcx
, ptr
, content_size
, content_align
, debug_loc
)
// Normalizes a type to the type actually used to key and generate its drop
// glue, so that types with identical drop behavior can share one glue
// function. Visible logic: unsized types are handled first (a vtable ptr may
// be needed deeper down even with no dtor), types that need no drop get a
// simplified answer, and `Box<T>` of a no-drop sized `T` is special-cased on
// whether the boxed allocation has zero size.
// NOTE(review): extraction gaps — the early-return values and match scaffold
// lines are missing from view; code left byte-identical.
95 pub fn get_drop_glue_type
<'a
, 'tcx
>(ccx
: &CrateContext
<'a
, 'tcx
>,
96 t
: Ty
<'tcx
>) -> Ty
<'tcx
> {
// (a missing line presumably bound `tcx` from `ccx` — TODO confirm)
98 // Even if there is no dtor for t, there might be one deeper down and we
99 // might need to pass in the vtable ptr.
100 if !type_is_sized(tcx
, t
) {
104 // FIXME (#22815): note that type_needs_drop conservatively
105 // approximates in some cases and may say a type expression
106 // requires drop glue when it actually does not.
108 // (In this case it is not clear whether any harm is done, i.e.
109 // erroneously returning `t` in some cases where we could have
110 // returned `tcx.types.i8` does not appear unsound. The impact on
111 // code quality is unknown at this time.)
113 if !type_needs_drop(tcx
, t
) {
117 ty
::ty_uniq(typ
) if !type_needs_drop(tcx
, typ
)
118 && type_is_sized(tcx
, typ
) => {
119 let llty
= sizing_type_of(ccx
, typ
);
120 // `Box<ZeroSizeType>` does not allocate.
121 if llsize_of_alloc(ccx
, llty
) == 0 {
// Public entry point for dropping a value by alias: thin wrapper that
// forwards to `drop_ty_core` with `skip_dtor = false` (i.e. the user `Drop`
// impl, if any, is run).
// NOTE(review): extraction gaps — the `v`/`t` parameter lines are missing
// from view; code left byte-identical.
131 pub fn drop_ty
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
134 debug_loc
: DebugLoc
) -> Block
<'blk
, 'tcx
> {
135 drop_ty_core(bcx
, v
, t
, debug_loc
, false)
// Core drop path. `v` is an *alias* (pointer) of type `t`, not a direct
// value. If `t` needs drop, picks the glue kind — `TyContents(t)` when
// `skip_dtor` (shallow dtor skipped) — fetches/creates the glue function,
// casts `v` when the normalized glue type differs from `t`, and emits the
// call to the glue with the (possibly cast) pointer.
// NOTE(review): extraction gaps — some binding/else/closing lines are
// missing from view; code left byte-identical.
138 pub fn drop_ty_core
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
142 skip_dtor
: bool
) -> Block
<'blk
, 'tcx
> {
143 // NB: v is an *alias* of type t here, not a direct value.
144 debug
!("drop_ty_core(t={}, skip_dtor={})", t
.repr(bcx
.tcx()), skip_dtor
);
145 let _icx
= push_ctxt("drop_ty");
146 if bcx
.fcx
.type_needs_drop(t
) {
148 let g
= if skip_dtor
{
149 DropGlueKind
::TyContents(t
)
153 let glue
= get_drop_glue_core(ccx
, g
);
154 let glue_type
= get_drop_glue_type(ccx
, t
);
155 let ptr
= if glue_type
!= t
{
156 PointerCast(bcx
, v
, type_of(ccx
, glue_type
).ptr_to())
161 Call(bcx
, glue
, &[ptr
], None
, debug_loc
);
// Drops an *immediate* (by-value) LLVM value: spills it into a fresh
// unnamed alloca of `t`'s LLVM type via `store_ty`, then delegates to the
// by-alias path `drop_ty_core` on that stack slot.
// NOTE(review): extraction gaps — the `v`/`t`/`skip_dtor` parameter lines
// are missing from view; code left byte-identical.
166 pub fn drop_ty_immediate
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
171 -> Block
<'blk
, 'tcx
> {
172 let _icx
= push_ctxt("drop_ty_immediate");
173 let vp
= alloca(bcx
, type_of(bcx
.ccx(), t
), "");
174 store_ty(bcx
, v
, vp
, t
);
175 drop_ty_core(bcx
, vp
, t
, debug_loc
, skip_dtor
)
// Public accessor for the drop-glue function of `t`: forwards to
// `get_drop_glue_core` with the normal (dtor-running) `DropGlueKind::Ty`.
178 pub fn get_drop_glue
<'a
, 'tcx
>(ccx
: &CrateContext
<'a
, 'tcx
>, t
: Ty
<'tcx
>) -> ValueRef
{
179 get_drop_glue_core(ccx
, DropGlueKind
::Ty(t
))
// Distinguishes the two flavors of drop glue that can be generated for one
// type; used as the key in the crate-context drop-glue caches.
// NOTE(review): the `Ty(Ty<'tcx>)` variant's own line is missing from this
// extraction (gap between fused lines 184 and 186); code left byte-identical.
182 #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
183 pub enum DropGlueKind
<'tcx
> {
184 /// The normal path; runs the dtor, and then recurs on the contents
186 /// Skips the dtor, if any, for ty; drops the contents directly.
187 /// Note that the dtor is only skipped at the most *shallow*
188 /// level, namely, an `impl Drop for Ty` itself. So, for example,
189 /// if Ty is Newtype(S) then only the Drop impl for Newtype
190 /// itself will be skipped, while the Drop impl for S, if any,
192 TyContents(Ty
<'tcx
>),
// Helpers on DropGlueKind: extract the wrapped type, map a function over it
// while preserving the variant, and render a debug string.
// NOTE(review): extraction gaps — some match scaffold and closing lines are
// missing from view; code left byte-identical.
195 impl<'tcx
> DropGlueKind
<'tcx
> {
// Returns the type wrapped by either variant.
196 fn ty(&self) -> Ty
<'tcx
> {
197 match *self { DropGlueKind::Ty(t) | DropGlueKind::TyContents(t) => t }
// Applies `f` to the wrapped type, keeping the same variant (used by
// get_drop_glue_core to normalize the key via get_drop_glue_type).
200 fn map_ty
<F
>(&self, mut f
: F
) -> DropGlueKind
<'tcx
> where F
: FnMut(Ty
<'tcx
>) -> Ty
<'tcx
>
203 DropGlueKind
::Ty(t
) => DropGlueKind
::Ty(f(t
)),
204 DropGlueKind
::TyContents(t
) => DropGlueKind
::TyContents(f(t
)),
// Debug rendering: variant name plus the pretty-printed type.
208 fn to_string
<'a
>(&self, ccx
: &CrateContext
<'a
, 'tcx
>) -> String
{
209 let t_str
= ppaux
::ty_to_string(ccx
.tcx(), self.ty());
211 DropGlueKind
::Ty(_
) => format
!("DropGlueKind::Ty({})", t_str
),
212 DropGlueKind
::TyContents(_
) => format
!("DropGlueKind::TyContents({})", t_str
),
// Returns (creating and caching if necessary) the LLVM drop-glue function
// for `g`. Flow visible below: normalize the key via `map_ty` +
// `get_drop_glue_type`; hit the `drop_glues` cache; otherwise compute the
// glue fn's LLVM type (pointer-to-`t` when sized, pointer-to-`Box<t>`
// otherwise), reuse a previously mangled symbol when available, else define
// a fresh `drop` symbol, record it in both caches, build a dummy function
// context, and populate the body via `make_drop_glue` before finishing the fn.
// NOTE(review): extraction gaps — several binding/else/closing lines are
// missing from view; code left byte-identical.
217 fn get_drop_glue_core
<'a
, 'tcx
>(ccx
: &CrateContext
<'a
, 'tcx
>,
218 g
: DropGlueKind
<'tcx
>) -> ValueRef
{
219 debug
!("make drop glue for {}", g
.to_string(ccx
));
220 let g
= g
.map_ty(|t
| get_drop_glue_type(ccx
, t
));
221 debug
!("drop glue type {}", g
.to_string(ccx
));
222 match ccx
.drop_glues().borrow().get(&g
) {
223 Some(&glue
) => return glue
,
228 let llty
= if type_is_sized(ccx
.tcx(), t
) {
229 type_of(ccx
, t
).ptr_to()
231 type_of(ccx
, ty
::mk_uniq(ccx
.tcx(), t
)).ptr_to()
234 let llfnty
= Type
::glue_fn(ccx
, llty
);
236 // To avoid infinite recursion, don't `make_drop_glue` until after we've
237 // added the entry to the `drop_glues` cache.
238 if let Some(old_sym
) = ccx
.available_drop_glues().borrow().get(&g
) {
239 let llfn
= declare
::declare_cfn(ccx
, &old_sym
, llfnty
, ty
::mk_nil(ccx
.tcx()));
240 ccx
.drop_glues().borrow_mut().insert(g
, llfn
);
244 let fn_nm
= mangle_internal_name_by_type_and_seq(ccx
, t
, "drop");
245 let llfn
= declare
::define_cfn(ccx
, &fn_nm
, llfnty
, ty
::mk_nil(ccx
.tcx())).unwrap_or_else(||{
246 ccx
.sess().bug(&format
!("symbol `{}` already defined", fn_nm
));
248 ccx
.available_drop_glues().borrow_mut().insert(g
, fn_nm
);
250 let _s
= StatRecorder
::new(ccx
, format
!("drop {}", ty_to_short_str(ccx
.tcx(), t
)));
252 let empty_substs
= ccx
.tcx().mk_substs(Substs
::trans_empty());
253 let (arena
, fcx
): (TypedArena
<_
>, FunctionContext
);
254 arena
= TypedArena
::new();
255 fcx
= new_fn_ctxt(ccx
, llfn
, ast
::DUMMY_NODE_ID
, false,
256 ty
::FnConverging(ty
::mk_nil(ccx
.tcx())),
257 empty_substs
, None
, &arena
);
259 let bcx
= init_function(&fcx
, false, ty
::FnConverging(ty
::mk_nil(ccx
.tcx())));
261 update_linkage(ccx
, llfn
, None
, OriginalTranslation
);
263 ccx
.stats().n_glues_created
.set(ccx
.stats().n_glues_created
.get() + 1);
264 // All glue functions take values passed *by alias*; this is a
265 // requirement since in many contexts glue is invoked indirectly and
266 // the caller has no idea if it's dealing with something that can be
269 // llfn is expected be declared to take a parameter of the appropriate
270 // type, so we don't need to explicitly cast the function parameter.
272 let llrawptr0
= get_param(llfn
, fcx
.arg_pos(0) as c_uint
);
273 let bcx
= make_drop_glue(bcx
, llrawptr0
, g
);
274 finish_fn(&fcx
, bcx
, ty
::FnConverging(ty
::mk_nil(ccx
.tcx())), DebugLoc
::None
);
// Conditionally drops a struct with an embedded drop flag. Loads the flag
// via `adt::trans_drop_flag_ptr`; in sanity-check builds, traps (via the
// `llvm.debugtrap` intrinsic) when the flag is *neither* DTOR_NEEDED *nor*
// DTOR_DONE — an invalid in-between bit pattern; finally, only when the
// flag equals DTOR_NEEDED, calls `trans_struct_drop` to run the dtor.
// Requires `t` to be sized (asserted) so the flag's offset is findable.
// NOTE(review): extraction gaps — the `t` parameter line and some scaffold
// lines are missing from view; code left byte-identical.
279 fn trans_struct_drop_flag
<'blk
, 'tcx
>(mut bcx
: Block
<'blk
, 'tcx
>,
281 struct_data
: ValueRef
,
282 dtor_did
: ast
::DefId
,
283 class_did
: ast
::DefId
,
284 substs
: &subst
::Substs
<'tcx
>)
285 -> Block
<'blk
, 'tcx
> {
286 assert
!(type_is_sized(bcx
.tcx(), t
), "Precondition: caller must ensure t is sized");
288 let repr
= adt
::represent_type(bcx
.ccx(), t
);
289 let drop_flag
= unpack_datum
!(bcx
, adt
::trans_drop_flag_ptr(bcx
, &*repr
, struct_data
));
290 let loaded
= load_ty(bcx
, drop_flag
.val
, bcx
.tcx().dtor_type());
291 let drop_flag_llty
= type_of(bcx
.fcx
.ccx
, bcx
.tcx().dtor_type());
292 let init_val
= C_integral(drop_flag_llty
, adt
::DTOR_NEEDED
as u64, false);
294 let bcx
= if !bcx
.ccx().check_drop_flag_for_sanity() {
297 let drop_flag_llty
= type_of(bcx
.fcx
.ccx
, bcx
.tcx().dtor_type());
298 let done_val
= C_integral(drop_flag_llty
, adt
::DTOR_DONE
as u64, false);
299 let not_init
= ICmp(bcx
, llvm
::IntNE
, loaded
, init_val
, DebugLoc
::None
);
300 let not_done
= ICmp(bcx
, llvm
::IntNE
, loaded
, done_val
, DebugLoc
::None
);
301 let drop_flag_neither_initialized_nor_cleared
=
302 And(bcx
, not_init
, not_done
, DebugLoc
::None
);
303 with_cond(bcx
, drop_flag_neither_initialized_nor_cleared
, |cx
| {
304 let llfn
= cx
.ccx().get_intrinsic(&("llvm.debugtrap"));
305 Call(cx
, llfn
, &[], None
, DebugLoc
::None
);
310 let drop_flag_dtor_needed
= ICmp(bcx
, llvm
::IntEQ
, loaded
, init_val
, DebugLoc
::None
);
311 with_cond(bcx
, drop_flag_dtor_needed
, |cx
| {
312 trans_struct_drop(cx
, t
, struct_data
, dtor_did
, class_did
, substs
)
// Resolves the destructor function value for a resource/struct. Three paths
// are visible: (1) generic dtor — must be local-crate; erase regions from
// the substs and monomorphize; (2) non-generic local-crate dtor — fetch via
// `get_item_val`; (3) external dtor — look up the symbol through metadata
// (`csearch::get_symbol`), substitute the parent type, and declare an
// extern C-call function of the dtor's type.
// NOTE(review): extraction gaps — the `did`/`t` parameter lines, return
// type, and some scaffold lines are missing from view; code byte-identical.
316 pub fn get_res_dtor
<'a
, 'tcx
>(ccx
: &CrateContext
<'a
, 'tcx
>,
319 parent_id
: ast
::DefId
,
320 substs
: &Substs
<'tcx
>)
322 let _icx
= push_ctxt("trans_res_dtor");
323 let did
= inline
::maybe_instantiate_inline(ccx
, did
);
325 if !substs
.types
.is_empty() {
326 assert_eq
!(did
.krate
, ast
::LOCAL_CRATE
);
328 // Since we're in trans we don't care for any region parameters
329 let substs
= ccx
.tcx().mk_substs(Substs
::erased(substs
.types
.clone()));
331 let (val
, _
, _
) = monomorphize
::monomorphic_fn(ccx
, did
, substs
, None
);
334 } else if did
.krate
== ast
::LOCAL_CRATE
{
335 get_item_val(ccx
, did
.node
)
338 let name
= csearch
::get_symbol(&ccx
.sess().cstore
, did
);
339 let class_ty
= ty
::lookup_item_type(tcx
, parent_id
).ty
.subst(tcx
, substs
);
340 let llty
= type_of_dtor(ccx
, class_ty
);
341 let dtor_ty
= ty
::mk_ctor_fn(ccx
.tcx(),
343 &[get_drop_glue_type(ccx
, t
)],
344 ty
::mk_nil(ccx
.tcx()));
345 foreign
::get_extern_fn(ccx
, &mut *ccx
.externs().borrow_mut(), &name
[..], llvm
::CCallConv
,
// Runs a struct's user-defined destructor and then its field destructors.
// Resolves the dtor address with `get_res_dtor`, sanity-checks (unsafe LLVM
// type inspection) that it takes exactly one parameter (self/environment),
// pushes a custom cleanup scope that schedules the contents drop — so the
// fields are still dropped even if the user dtor panics (`invoke`) — then
// invokes the dtor and pops/translates the cleanup scope.
// NOTE(review): extraction gaps — the `t`/`v0` parameter lines and return
// type are missing from view; code left byte-identical.
350 fn trans_struct_drop
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>,
353 dtor_did
: ast
::DefId
,
354 class_did
: ast
::DefId
,
355 substs
: &subst
::Substs
<'tcx
>)
358 debug
!("trans_struct_drop t: {}", bcx
.ty_to_string(t
));
360 // Find and call the actual destructor
361 let dtor_addr
= get_res_dtor(bcx
.ccx(), dtor_did
, t
, class_did
, substs
);
363 // Class dtors have no explicit args, so the params should
364 // just consist of the environment (self).
365 let params
= unsafe {
366 let ty
= Type
::from_ref(llvm
::LLVMTypeOf(dtor_addr
));
367 ty
.element_type().func_params()
369 assert_eq
!(params
.len(), 1);
371 // Be sure to put the contents into a scope so we can use an invoke
372 // instruction to call the user destructor but still call the field
373 // destructors if the user destructor panics.
375 // FIXME (#14875) panic-in-drop semantics might be unsupported; we
376 // might well consider changing below to more direct code.
377 let contents_scope
= bcx
.fcx
.push_custom_cleanup_scope();
379 // Issue #23611: schedule cleanup of contents, re-inspecting the
380 // discriminant (if any) in case of variant swap in drop code.
381 bcx
.fcx
.schedule_drop_adt_contents(cleanup
::CustomScope(contents_scope
), v0
, t
);
383 let glue_type
= get_drop_glue_type(bcx
.ccx(), t
);
384 let dtor_ty
= ty
::mk_ctor_fn(bcx
.tcx(), class_did
, &[glue_type
], ty
::mk_nil(bcx
.tcx()));
385 let (_
, bcx
) = invoke(bcx
, dtor_addr
, &[v0
], dtor_ty
, DebugLoc
::None
);
387 bcx
.fcx
.pop_and_trans_custom_cleanup_scope(bcx
, contents_scope
)
// Computes the runtime (size, align) of a possibly dynamically sized type
// as a pair of LLVM ValueRefs, given the fat-pointer "extra" word `info`.
// Visible cases: sized `t` → constants from the sizing type; struct DST →
// constant sized prefix plus recursive size/align of the last (unsized)
// field; trait object → load size and align from vtable slots [1] and [2];
// slice/str → `info` is the element count, multiplied by the unit size;
// anything else is a compiler bug.
// NOTE(review): extraction gaps — the match header, the align Select
// operands, and closing lines are missing from view; code byte-identical.
390 pub fn size_and_align_of_dst
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>, t
: Ty
<'tcx
>, info
: ValueRef
)
391 -> (ValueRef
, ValueRef
) {
392 debug
!("calculate size of DST: {}; with lost info: {}",
393 bcx
.ty_to_string(t
), bcx
.val_to_string(info
));
394 if type_is_sized(bcx
.tcx(), t
) {
395 let sizing_type
= sizing_type_of(bcx
.ccx(), t
);
396 let size
= C_uint(bcx
.ccx(), llsize_of_alloc(bcx
.ccx(), sizing_type
));
397 let align
= C_uint(bcx
.ccx(), align_of(bcx
.ccx(), t
));
398 return (size
, align
);
401 ty
::ty_struct(id
, substs
) => {
403 // First get the size of all statically known fields.
404 // Don't use type_of::sizing_type_of because that expects t to be sized.
405 assert
!(!ty
::type_is_simd(bcx
.tcx(), t
));
406 let repr
= adt
::represent_type(ccx
, t
);
407 let sizing_type
= adt
::sizing_type_of(ccx
, &*repr
, true);
408 let sized_size
= C_uint(ccx
, llsize_of_alloc(ccx
, sizing_type
));
409 let sized_align
= C_uint(ccx
, llalign_of_min(ccx
, sizing_type
));
411 // Recurse to get the size of the dynamically sized field (must be
413 let fields
= ty
::struct_fields(bcx
.tcx(), id
, substs
);
414 let last_field
= fields
[fields
.len()-1];
415 let field_ty
= last_field
.mt
.ty
;
416 let (unsized_size
, unsized_align
) = size_and_align_of_dst(bcx
, field_ty
, info
);
418 // Return the sum of sizes and max of aligns.
419 let size
= Add(bcx
, sized_size
, unsized_size
, DebugLoc
::None
);
420 let align
= Select(bcx
,
430 ty
::ty_trait(..) => {
431 // info points to the vtable and the second entry in the vtable is the
432 // dynamic size of the object.
433 let info
= PointerCast(bcx
, info
, Type
::int(bcx
.ccx()).ptr_to());
434 let size_ptr
= GEPi(bcx
, info
, &[1]);
435 let align_ptr
= GEPi(bcx
, info
, &[2]);
436 (Load(bcx
, size_ptr
), Load(bcx
, align_ptr
))
438 ty
::ty_vec(_
, None
) | ty
::ty_str
=> {
439 let unit_ty
= ty
::sequence_element_type(bcx
.tcx(), t
);
440 // The info in this case is the length of the str, so the size is that
441 // times the unit size.
442 let llunit_ty
= sizing_type_of(bcx
.ccx(), unit_ty
);
443 let unit_align
= llalign_of_min(bcx
.ccx(), llunit_ty
);
444 let unit_size
= llsize_of_alloc(bcx
.ccx(), llunit_ty
);
445 (Mul(bcx
, info
, C_uint(bcx
.ccx(), unit_size
), DebugLoc
::None
),
446 C_uint(bcx
.ccx(), unit_align
))
448 _
=> bcx
.sess().bug(&format
!("Unexpected unsized type, found {}",
449 bcx
.ty_to_string(t
)))
453 fn make_drop_glue
<'blk
, 'tcx
>(bcx
: Block
<'blk
, 'tcx
>, v0
: ValueRef
, g
: DropGlueKind
<'tcx
>)
454 -> Block
<'blk
, 'tcx
> {
456 let skip_dtor
= match g { DropGlueKind::Ty(_) => false, DropGlueKind::TyContents(_) => true }
;
457 // NB: v0 is an *alias* of type t here, not a direct value.
458 let _icx
= push_ctxt("make_drop_glue");
460 // Only drop the value when it ... well, we used to check for
461 // non-null, (and maybe we need to continue doing so), but we now
462 // must definitely check for special bit-patterns corresponding to
463 // the special dtor markings.
465 let inttype
= Type
::int(bcx
.ccx());
466 let dropped_pattern
= C_integral(inttype
, adt
::dtor_done_usize(bcx
.fcx
.ccx
) as u64, false);
469 ty
::ty_uniq(content_ty
) => {
470 // Support for ty_uniq is built-in and its drop glue is
471 // special. It may move to library and have Drop impl. As
472 // a safe-guard, assert ty_uniq not used with TyContents.
474 if !type_is_sized(bcx
.tcx(), content_ty
) {
475 let llval
= GEPi(bcx
, v0
, &[0, abi
::FAT_PTR_ADDR
]);
476 let llbox
= Load(bcx
, llval
);
477 let llbox_as_usize
= PtrToInt(bcx
, llbox
, Type
::int(bcx
.ccx()));
478 let drop_flag_not_dropped_already
=
479 ICmp(bcx
, llvm
::IntNE
, llbox_as_usize
, dropped_pattern
, DebugLoc
::None
);
480 with_cond(bcx
, drop_flag_not_dropped_already
, |bcx
| {
481 let bcx
= drop_ty(bcx
, v0
, content_ty
, DebugLoc
::None
);
482 let info
= GEPi(bcx
, v0
, &[0, abi
::FAT_PTR_EXTRA
]);
483 let info
= Load(bcx
, info
);
484 let (llsize
, llalign
) = size_and_align_of_dst(bcx
, content_ty
, info
);
486 // `Box<ZeroSizeType>` does not allocate.
487 let needs_free
= ICmp(bcx
,
490 C_uint(bcx
.ccx(), 0u64),
492 with_cond(bcx
, needs_free
, |bcx
| {
493 trans_exchange_free_dyn(bcx
, llbox
, llsize
, llalign
, DebugLoc
::None
)
498 let llbox
= Load(bcx
, llval
);
499 let llbox_as_usize
= PtrToInt(bcx
, llbox
, inttype
);
500 let drop_flag_not_dropped_already
=
501 ICmp(bcx
, llvm
::IntNE
, llbox_as_usize
, dropped_pattern
, DebugLoc
::None
);
502 with_cond(bcx
, drop_flag_not_dropped_already
, |bcx
| {
503 let bcx
= drop_ty(bcx
, llbox
, content_ty
, DebugLoc
::None
);
504 trans_exchange_free_ty(bcx
, llbox
, content_ty
, DebugLoc
::None
)
508 ty
::ty_struct(did
, substs
) | ty
::ty_enum(did
, substs
) => {
510 match (ty
::ty_dtor(tcx
, did
), skip_dtor
) {
511 (ty
::TraitDtor(dtor
, true), false) => {
512 // FIXME(16758) Since the struct is unsized, it is hard to
513 // find the drop flag (which is at the end of the struct).
514 // Lets just ignore the flag and pretend everything will be
516 if type_is_sized(bcx
.tcx(), t
) {
517 trans_struct_drop_flag(bcx
, t
, v0
, dtor
, did
, substs
)
519 // Give the user a heads up that we are doing something
520 // stupid and dangerous.
521 bcx
.sess().warn(&format
!("Ignoring drop flag in destructor for {}\
522 because the struct is unsized. See issue\
524 bcx
.ty_to_string(t
)));
525 trans_struct_drop(bcx
, t
, v0
, dtor
, did
, substs
)
528 (ty
::TraitDtor(dtor
, false), false) => {
529 trans_struct_drop(bcx
, t
, v0
, dtor
, did
, substs
)
531 (ty
::NoDtor
, _
) | (_
, true) => {
532 // No dtor? Just the default case
533 iter_structural_ty(bcx
, v0
, t
, |bb
, vv
, tt
| drop_ty(bb
, vv
, tt
, DebugLoc
::None
))
537 ty
::ty_trait(..) => {
538 // No support in vtable for distinguishing destroying with
539 // versus without calling Drop::drop. Assert caller is
540 // okay with always calling the Drop impl, if any.
542 let data_ptr
= GEPi(bcx
, v0
, &[0, abi
::FAT_PTR_ADDR
]);
543 let vtable_ptr
= Load(bcx
, GEPi(bcx
, v0
, &[0, abi
::FAT_PTR_EXTRA
]));
544 let dtor
= Load(bcx
, vtable_ptr
);
547 &[PointerCast(bcx
, Load(bcx
, data_ptr
), Type
::i8p(bcx
.ccx()))],
553 if bcx
.fcx
.type_needs_drop(t
) {
554 iter_structural_ty(bcx
,
557 |bb
, vv
, tt
| drop_ty(bb
, vv
, tt
, DebugLoc
::None
))