// git.proxmox.com Git - rustc.git/blob - src/librustc_trans/trans/glue.rs
// Move away from hash to the same rust naming schema
// [rustc.git] / src / librustc_trans / trans / glue.rs
1 // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
//! Code relating to drop glue: the compiler-synthesized functions that
//! run destructors and free heap allocations when a value goes out of
//! scope.
15
16 use back::link::*;
17 use llvm;
18 use llvm::{ValueRef, get_param};
19 use middle::lang_items::ExchangeFreeFnLangItem;
20 use middle::subst::{Substs};
21 use middle::traits;
22 use middle::ty::{self, Ty};
23 use trans::adt;
24 use trans::adt::GetDtorType; // for tcx.dtor_type()
25 use trans::base::*;
26 use trans::build::*;
27 use trans::callee;
28 use trans::cleanup;
29 use trans::cleanup::CleanupMethods;
30 use trans::common::*;
31 use trans::debuginfo::DebugLoc;
32 use trans::declare;
33 use trans::expr;
34 use trans::machine::*;
35 use trans::monomorphize;
36 use trans::type_of::{type_of, sizing_type_of, align_of};
37 use trans::type_::Type;
38
39 use arena::TypedArena;
40 use libc::c_uint;
41 use syntax::ast;
42 use syntax::codemap::DUMMY_SP;
43
44 pub fn trans_exchange_free_dyn<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
45 v: ValueRef,
46 size: ValueRef,
47 align: ValueRef,
48 debug_loc: DebugLoc)
49 -> Block<'blk, 'tcx> {
50 let _icx = push_ctxt("trans_exchange_free");
51 let ccx = cx.ccx();
52 callee::trans_lang_call(cx,
53 langcall(cx, None, "", ExchangeFreeFnLangItem),
54 &[PointerCast(cx, v, Type::i8p(ccx)), size, align],
55 Some(expr::Ignore),
56 debug_loc).bcx
57 }
58
59 pub fn trans_exchange_free<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
60 v: ValueRef,
61 size: u64,
62 align: u32,
63 debug_loc: DebugLoc)
64 -> Block<'blk, 'tcx> {
65 trans_exchange_free_dyn(cx,
66 v,
67 C_uint(cx.ccx(), size),
68 C_uint(cx.ccx(), align),
69 debug_loc)
70 }
71
72 pub fn trans_exchange_free_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
73 ptr: ValueRef,
74 content_ty: Ty<'tcx>,
75 debug_loc: DebugLoc)
76 -> Block<'blk, 'tcx> {
77 assert!(type_is_sized(bcx.ccx().tcx(), content_ty));
78 let sizing_type = sizing_type_of(bcx.ccx(), content_ty);
79 let content_size = llsize_of_alloc(bcx.ccx(), sizing_type);
80
81 // `Box<ZeroSizeType>` does not allocate.
82 if content_size != 0 {
83 let content_align = align_of(bcx.ccx(), content_ty);
84 trans_exchange_free(bcx, ptr, content_size, content_align, debug_loc)
85 } else {
86 bcx
87 }
88 }
89
90 pub fn get_drop_glue_type<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
91 t: Ty<'tcx>) -> Ty<'tcx> {
92 let tcx = ccx.tcx();
93 // Even if there is no dtor for t, there might be one deeper down and we
94 // might need to pass in the vtable ptr.
95 if !type_is_sized(tcx, t) {
96 return t
97 }
98
99 // FIXME (#22815): note that type_needs_drop conservatively
100 // approximates in some cases and may say a type expression
101 // requires drop glue when it actually does not.
102 //
103 // (In this case it is not clear whether any harm is done, i.e.
104 // erroneously returning `t` in some cases where we could have
105 // returned `tcx.types.i8` does not appear unsound. The impact on
106 // code quality is unknown at this time.)
107
108 if !type_needs_drop(tcx, t) {
109 return tcx.types.i8;
110 }
111 match t.sty {
112 ty::TyBox(typ) if !type_needs_drop(tcx, typ)
113 && type_is_sized(tcx, typ) => {
114 let llty = sizing_type_of(ccx, typ);
115 // `Box<ZeroSizeType>` does not allocate.
116 if llsize_of_alloc(ccx, llty) == 0 {
117 tcx.types.i8
118 } else {
119 t
120 }
121 }
122 _ => t
123 }
124 }
125
/// Emits code to drop the value of type `t` stored at `v` (`v` is an
/// *alias* — a pointer to the value, not the value itself). Convenience
/// wrapper around `drop_ty_core` that runs the full dtor (no skipping)
/// and uses no drop hint.
pub fn drop_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                           v: ValueRef,
                           t: Ty<'tcx>,
                           debug_loc: DebugLoc) -> Block<'blk, 'tcx> {
    drop_ty_core(bcx, v, t, debug_loc, false, None)
}
132
/// Emits a call to the drop glue for `t` on the value aliased by `v`.
///
/// * `skip_dtor` — when true, uses "contents-only" glue that bypasses
///   the shallow `Drop` impl for `t` itself (see `DropGlueKind`).
/// * `drop_hint` — optional runtime flag; when present, the glue call is
///   guarded by a check that the value was not already moved away.
///
/// Returns the block to continue emitting into (a new one if a
/// conditional was emitted). No-op if `t` needs no drop.
pub fn drop_ty_core<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                v: ValueRef,
                                t: Ty<'tcx>,
                                debug_loc: DebugLoc,
                                skip_dtor: bool,
                                drop_hint: Option<cleanup::DropHintValue>)
                                -> Block<'blk, 'tcx> {
    // NB: v is an *alias* of type t here, not a direct value.
    debug!("drop_ty_core(t={:?}, skip_dtor={} drop_hint={:?})", t, skip_dtor, drop_hint);
    let _icx = push_ctxt("drop_ty");
    let mut bcx = bcx;
    if bcx.fcx.type_needs_drop(t) {
        let ccx = bcx.ccx();
        // Pick the glue kind: full drop vs. contents-only.
        let g = if skip_dtor {
            DropGlueKind::TyContents(t)
        } else {
            DropGlueKind::Ty(t)
        };
        let glue = get_drop_glue_core(ccx, g);
        // Glue may have been generated for a simplified type (e.g. `i8`
        // for no-op drops — see `get_drop_glue_type`); cast the pointer
        // to match the glue's expected parameter type when they differ.
        let glue_type = get_drop_glue_type(ccx, t);
        let ptr = if glue_type != t {
            PointerCast(bcx, v, type_of(ccx, glue_type).ptr_to())
        } else {
            v
        };

        match drop_hint {
            Some(drop_hint) => {
                // Guard the glue call: only drop if the hint byte does
                // not carry the "already moved" marker.
                let hint_val = load_ty(bcx, drop_hint.value(), bcx.tcx().types.u8);
                let moved_val =
                    C_integral(Type::i8(bcx.ccx()), adt::DTOR_MOVED_HINT as u64, false);
                let may_need_drop =
                    ICmp(bcx, llvm::IntNE, hint_val, moved_val, DebugLoc::None);
                bcx = with_cond(bcx, may_need_drop, |cx| {
                    Call(cx, glue, &[ptr], None, debug_loc);
                    cx
                })
            }
            None => {
                // No drop-hint ==> call standard drop glue
                Call(bcx, glue, &[ptr], None, debug_loc);
            }
        }
    }
    bcx
}
179
/// Drops an *immediate* value `v` of type `t`: since drop glue takes its
/// argument by alias, the value is first spilled to a fresh stack slot
/// (with LLVM lifetime markers bracketing its live range) and dropped
/// from there.
pub fn drop_ty_immediate<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                     v: ValueRef,
                                     t: Ty<'tcx>,
                                     debug_loc: DebugLoc,
                                     skip_dtor: bool)
                                     -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("drop_ty_immediate");
    // Spill to a temporary alloca so the glue can take the value by alias.
    let vp = alloc_ty(bcx, t, "");
    call_lifetime_start(bcx, vp);
    store_ty(bcx, v, vp, t);
    let bcx = drop_ty_core(bcx, vp, t, debug_loc, skip_dtor, None);
    call_lifetime_end(bcx, vp);
    bcx
}
194
/// Returns (declaring/generating as needed) the full drop glue function
/// for `t` — the `DropGlueKind::Ty` variant that runs the dtor.
pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ValueRef {
    get_drop_glue_core(ccx, DropGlueKind::Ty(t))
}
198
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum DropGlueKind<'tcx> {
    /// The normal path; runs the dtor, and then recurs on the contents
    Ty(Ty<'tcx>),
    /// Skips the dtor, if any, for ty; drops the contents directly.
    /// Note that the dtor is only skipped at the most *shallow*
    /// level, namely, an `impl Drop for Ty` itself. So, for example,
    /// if Ty is Newtype(S) then only the Drop impl for Newtype
    /// itself will be skipped, while the Drop impl for S, if any,
    /// will be invoked.
    TyContents(Ty<'tcx>),
}
211
212 impl<'tcx> DropGlueKind<'tcx> {
213 fn ty(&self) -> Ty<'tcx> {
214 match *self { DropGlueKind::Ty(t) | DropGlueKind::TyContents(t) => t }
215 }
216
217 fn map_ty<F>(&self, mut f: F) -> DropGlueKind<'tcx> where F: FnMut(Ty<'tcx>) -> Ty<'tcx>
218 {
219 match *self {
220 DropGlueKind::Ty(t) => DropGlueKind::Ty(f(t)),
221 DropGlueKind::TyContents(t) => DropGlueKind::TyContents(f(t)),
222 }
223 }
224 }
225
/// Returns the drop-glue function for `g`, generating and caching it on
/// first request.
///
/// Protocol: (1) normalize the type via `get_drop_glue_type` so no-op
/// drops share one glue; (2) hit the `drop_glues` cache; (3) if a symbol
/// already exists from a previous codegen unit, just re-declare it;
/// (4) otherwise define a fresh function, insert it into the cache
/// *before* translating the body (breaking infinite recursion for
/// recursive types), and emit the body via `make_drop_glue`.
fn get_drop_glue_core<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
                                g: DropGlueKind<'tcx>) -> ValueRef {
    debug!("make drop glue for {:?}", g);
    // Normalize: glue is keyed on the simplified type.
    let g = g.map_ty(|t| get_drop_glue_type(ccx, t));
    debug!("drop glue type {:?}", g);
    match ccx.drop_glues().borrow().get(&g) {
        Some(&glue) => return glue,
        _ => { }
    }
    let t = g.ty();

    // Sized values are passed as `*T`; unsized values as a pointer to a
    // fat-pointer-carrying box representation.
    let llty = if type_is_sized(ccx.tcx(), t) {
        type_of(ccx, t).ptr_to()
    } else {
        type_of(ccx, ccx.tcx().mk_box(t)).ptr_to()
    };

    let llfnty = Type::glue_fn(ccx, llty);

    // To avoid infinite recursion, don't `make_drop_glue` until after we've
    // added the entry to the `drop_glues` cache.
    if let Some(old_sym) = ccx.available_drop_glues().borrow().get(&g) {
        let llfn = declare::declare_cfn(ccx, &old_sym, llfnty, ccx.tcx().mk_nil());
        ccx.drop_glues().borrow_mut().insert(g, llfn);
        return llfn;
    };

    let fn_nm = mangle_internal_name_by_type_and_seq(ccx, t, "drop");
    let llfn = declare::define_cfn(ccx, &fn_nm, llfnty, ccx.tcx().mk_nil()).unwrap_or_else(||{
        ccx.sess().bug(&format!("symbol `{}` already defined", fn_nm));
    });
    // Record the symbol before generating the body (see recursion note).
    ccx.available_drop_glues().borrow_mut().insert(g, fn_nm);

    let _s = StatRecorder::new(ccx, format!("drop {:?}", t));

    // Glue is non-generic: translate with an empty substitution set.
    let empty_substs = ccx.tcx().mk_substs(Substs::trans_empty());
    let (arena, fcx): (TypedArena<_>, FunctionContext);
    arena = TypedArena::new();
    fcx = new_fn_ctxt(ccx, llfn, ast::DUMMY_NODE_ID, false,
                      ty::FnConverging(ccx.tcx().mk_nil()),
                      empty_substs, None, &arena);

    let bcx = init_function(&fcx, false, ty::FnConverging(ccx.tcx().mk_nil()));

    update_linkage(ccx, llfn, None, OriginalTranslation);

    ccx.stats().n_glues_created.set(ccx.stats().n_glues_created.get() + 1);
    // All glue functions take values passed *by alias*; this is a
    // requirement since in many contexts glue is invoked indirectly and
    // the caller has no idea if it's dealing with something that can be
    // passed by value.
    //
    // llfn is expected be declared to take a parameter of the appropriate
    // type, so we don't need to explicitly cast the function parameter.

    let llrawptr0 = get_param(llfn, fcx.arg_offset() as c_uint);
    let bcx = make_drop_glue(bcx, llrawptr0, g);
    finish_fn(&fcx, bcx, ty::FnConverging(ccx.tcx().mk_nil()), DebugLoc::None);

    llfn
}
287
288 fn trans_struct_drop_flag<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
289 t: Ty<'tcx>,
290 struct_data: ValueRef)
291 -> Block<'blk, 'tcx> {
292 assert!(type_is_sized(bcx.tcx(), t), "Precondition: caller must ensure t is sized");
293
294 let repr = adt::represent_type(bcx.ccx(), t);
295 let drop_flag = unpack_datum!(bcx, adt::trans_drop_flag_ptr(bcx, &*repr, struct_data));
296 let loaded = load_ty(bcx, drop_flag.val, bcx.tcx().dtor_type());
297 let drop_flag_llty = type_of(bcx.fcx.ccx, bcx.tcx().dtor_type());
298 let init_val = C_integral(drop_flag_llty, adt::DTOR_NEEDED as u64, false);
299
300 let bcx = if !bcx.ccx().check_drop_flag_for_sanity() {
301 bcx
302 } else {
303 let drop_flag_llty = type_of(bcx.fcx.ccx, bcx.tcx().dtor_type());
304 let done_val = C_integral(drop_flag_llty, adt::DTOR_DONE as u64, false);
305 let not_init = ICmp(bcx, llvm::IntNE, loaded, init_val, DebugLoc::None);
306 let not_done = ICmp(bcx, llvm::IntNE, loaded, done_val, DebugLoc::None);
307 let drop_flag_neither_initialized_nor_cleared =
308 And(bcx, not_init, not_done, DebugLoc::None);
309 with_cond(bcx, drop_flag_neither_initialized_nor_cleared, |cx| {
310 let llfn = cx.ccx().get_intrinsic(&("llvm.debugtrap"));
311 Call(cx, llfn, &[], None, DebugLoc::None);
312 cx
313 })
314 };
315
316 let drop_flag_dtor_needed = ICmp(bcx, llvm::IntEQ, loaded, init_val, DebugLoc::None);
317 with_cond(bcx, drop_flag_dtor_needed, |cx| {
318 trans_struct_drop(cx, t, struct_data)
319 })
320 }
/// Emits an unconditional call to the user-defined `Drop::drop` impl for
/// struct/enum type `t` (value aliased by `v0`), followed by drops of
/// the fields.
///
/// The field drops are scheduled as a cleanup so they still run if the
/// user destructor panics.
fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                 t: Ty<'tcx>,
                                 v0: ValueRef)
                                 -> Block<'blk, 'tcx>
{
    debug!("trans_struct_drop t: {}", t);
    let tcx = bcx.tcx();
    let mut bcx = bcx;

    let def = t.ty_adt_def().unwrap();

    // Be sure to put the contents into a scope so we can use an invoke
    // instruction to call the user destructor but still call the field
    // destructors if the user destructor panics.
    //
    // FIXME (#14875) panic-in-drop semantics might be unsupported; we
    // might well consider changing below to more direct code.
    let contents_scope = bcx.fcx.push_custom_cleanup_scope();

    // Issue #23611: schedule cleanup of contents, re-inspecting the
    // discriminant (if any) in case of variant swap in drop code.
    bcx.fcx.schedule_drop_adt_contents(cleanup::CustomScope(contents_scope), v0, t);

    // Sized values are passed as one pointer argument; unsized values as
    // the (data, metadata) pair of the fat pointer.
    let (sized_args, unsized_args);
    let args: &[ValueRef] = if type_is_sized(tcx, t) {
        sized_args = [v0];
        &sized_args
    } else {
        unsized_args = [Load(bcx, expr::get_dataptr(bcx, v0)), Load(bcx, expr::get_meta(bcx, v0))];
        &unsized_args
    };

    bcx = callee::trans_call_inner(bcx, DebugLoc::None, |bcx, _| {
        // Resolve the concrete `Drop` impl for `t` via trait selection.
        let trait_ref = ty::Binder(ty::TraitRef {
            def_id: tcx.lang_items.drop_trait().unwrap(),
            substs: tcx.mk_substs(Substs::trans_empty().with_self_ty(t))
        });
        let vtbl = match fulfill_obligation(bcx.ccx(), DUMMY_SP, trait_ref) {
            traits::VtableImpl(data) => data,
            _ => tcx.sess.bug(&format!("dtor for {:?} is not an impl???", t))
        };
        let dtor_did = def.destructor().unwrap();
        let datum = callee::trans_fn_ref_with_substs(bcx.ccx(),
                                                     dtor_did,
                                                     ExprId(0),
                                                     bcx.fcx.param_substs,
                                                     vtbl.substs);
        callee::Callee {
            bcx: bcx,
            data: callee::Fn(datum.val),
            ty: datum.ty
        }
    }, callee::ArgVals(args), Some(expr::Ignore)).bcx;

    // Run the scheduled field drops now (or via landing pad on panic).
    bcx.fcx.pop_and_trans_custom_cleanup_scope(bcx, contents_scope)
}
377
/// Computes the (size, align) of a possibly-unsized value of type `t` as
/// runtime `ValueRef`s, using `info` — the fat-pointer metadata (vtable
/// pointer, or slice length) — when `t` is a DST. For sized `t`, `info`
/// is ignored and compile-time constants are returned.
pub fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info: ValueRef)
                                         -> (ValueRef, ValueRef) {
    debug!("calculate size of DST: {}; with lost info: {}",
           t, bcx.val_to_string(info));
    if type_is_sized(bcx.tcx(), t) {
        // Statically known: fold to constants.
        let sizing_type = sizing_type_of(bcx.ccx(), t);
        let size = llsize_of_alloc(bcx.ccx(), sizing_type);
        let align = align_of(bcx.ccx(), t);
        debug!("size_and_align_of_dst t={} info={} size: {} align: {}",
               t, bcx.val_to_string(info), size, align);
        let size = C_uint(bcx.ccx(), size);
        let align = C_uint(bcx.ccx(), align);
        return (size, align);
    }
    match t.sty {
        ty::TyStruct(def, substs) => {
            // A struct is unsized only via its last field: size is the
            // statically-known prefix plus the dynamic tail, align is
            // the max of the two.
            let ccx = bcx.ccx();
            // First get the size of all statically known fields.
            // Don't use type_of::sizing_type_of because that expects t to be sized.
            assert!(!t.is_simd());
            let repr = adt::represent_type(ccx, t);
            let sizing_type = adt::sizing_type_context_of(ccx, &*repr, true);
            debug!("DST {} sizing_type: {}", t, sizing_type.to_string());
            let sized_size = llsize_of_alloc(ccx, sizing_type.prefix());
            let sized_align = llalign_of_min(ccx, sizing_type.prefix());
            debug!("DST {} statically sized prefix size: {} align: {}",
                   t, sized_size, sized_align);
            let sized_size = C_uint(ccx, sized_size);
            let sized_align = C_uint(ccx, sized_align);

            // Recurse to get the size of the dynamically sized field (must be
            // the last field).
            let last_field = def.struct_variant().fields.last().unwrap();
            let field_ty = monomorphize::field_ty(bcx.tcx(), substs, last_field);
            let (unsized_size, unsized_align) = size_and_align_of_dst(bcx, field_ty, info);

            let dbloc = DebugLoc::None;

            // FIXME (#26403, #27023): We should be adding padding
            // to `sized_size` (to accommodate the `unsized_align`
            // required of the unsized field that follows) before
            // summing it with `sized_size`. (Note that since #26403
            // is unfixed, we do not yet add the necessary padding
            // here. But this is where the add would go.)

            // Return the sum of sizes and max of aligns.
            let mut size = Add(bcx, sized_size, unsized_size, dbloc);

            // Issue #27023: If there is a drop flag, *now* we add 1
            // to the size. (We can do this without adding any
            // padding because drop flags do not have any alignment
            // constraints.)
            if sizing_type.needs_drop_flag() {
                size = Add(bcx, size, C_uint(bcx.ccx(), 1_u64), dbloc);
            }

            // Choose max of two known alignments (combined value must
            // be aligned according to more restrictive of the two).
            let align = Select(bcx,
                               ICmp(bcx,
                                    llvm::IntUGT,
                                    sized_align,
                                    unsized_align,
                                    dbloc),
                               sized_align,
                               unsized_align);

            // Issue #27023: must add any necessary padding to `size`
            // (to make it a multiple of `align`) before returning it.
            //
            // Namely, the returned size should be, in C notation:
            //
            //   `size + ((size & (align-1)) ? align : 0)`
            //
            // emulated via the semi-standard fast bit trick
            // (`-align` == `!(align - 1)` for power-of-two align):
            //
            //   `(size + (align-1)) & -align`

            let addend = Sub(bcx, align, C_uint(bcx.ccx(), 1_u64), dbloc);
            let size = And(
                bcx, Add(bcx, size, addend, dbloc), Neg(bcx, align, dbloc), dbloc);

            (size, align)
        }
        ty::TyTrait(..) => {
            // info points to the vtable and the second entry in the vtable is the
            // dynamic size of the object.
            let info = PointerCast(bcx, info, Type::int(bcx.ccx()).ptr_to());
            let size_ptr = GEPi(bcx, info, &[1]);
            let align_ptr = GEPi(bcx, info, &[2]);
            (Load(bcx, size_ptr), Load(bcx, align_ptr))
        }
        ty::TySlice(_) | ty::TyStr => {
            let unit_ty = t.sequence_element_type(bcx.tcx());
            // The info in this case is the length of the str, so the size is that
            // times the unit size.
            let llunit_ty = sizing_type_of(bcx.ccx(), unit_ty);
            let unit_align = llalign_of_min(bcx.ccx(), llunit_ty);
            let unit_size = llsize_of_alloc(bcx.ccx(), llunit_ty);
            (Mul(bcx, info, C_uint(bcx.ccx(), unit_size), DebugLoc::None),
             C_uint(bcx.ccx(), unit_align))
        }
        _ => bcx.sess().bug(&format!("Unexpected unsized type, found {}", t))
    }
}
483
484 fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, g: DropGlueKind<'tcx>)
485 -> Block<'blk, 'tcx> {
486 let t = g.ty();
487 let skip_dtor = match g { DropGlueKind::Ty(_) => false, DropGlueKind::TyContents(_) => true };
488 // NB: v0 is an *alias* of type t here, not a direct value.
489 let _icx = push_ctxt("make_drop_glue");
490
491 // Only drop the value when it ... well, we used to check for
492 // non-null, (and maybe we need to continue doing so), but we now
493 // must definitely check for special bit-patterns corresponding to
494 // the special dtor markings.
495
496 let inttype = Type::int(bcx.ccx());
497 let dropped_pattern = C_integral(inttype, adt::dtor_done_usize(bcx.fcx.ccx) as u64, false);
498
499 match t.sty {
500 ty::TyBox(content_ty) => {
501 // Support for TyBox is built-in and its drop glue is
502 // special. It may move to library and have Drop impl. As
503 // a safe-guard, assert TyBox not used with TyContents.
504 assert!(!skip_dtor);
505 if !type_is_sized(bcx.tcx(), content_ty) {
506 let llval = expr::get_dataptr(bcx, v0);
507 let llbox = Load(bcx, llval);
508 let llbox_as_usize = PtrToInt(bcx, llbox, Type::int(bcx.ccx()));
509 let drop_flag_not_dropped_already =
510 ICmp(bcx, llvm::IntNE, llbox_as_usize, dropped_pattern, DebugLoc::None);
511 with_cond(bcx, drop_flag_not_dropped_already, |bcx| {
512 let bcx = drop_ty(bcx, v0, content_ty, DebugLoc::None);
513 let info = expr::get_meta(bcx, v0);
514 let info = Load(bcx, info);
515 let (llsize, llalign) = size_and_align_of_dst(bcx, content_ty, info);
516
517 // `Box<ZeroSizeType>` does not allocate.
518 let needs_free = ICmp(bcx,
519 llvm::IntNE,
520 llsize,
521 C_uint(bcx.ccx(), 0u64),
522 DebugLoc::None);
523 with_cond(bcx, needs_free, |bcx| {
524 trans_exchange_free_dyn(bcx, llbox, llsize, llalign, DebugLoc::None)
525 })
526 })
527 } else {
528 let llval = v0;
529 let llbox = Load(bcx, llval);
530 let llbox_as_usize = PtrToInt(bcx, llbox, inttype);
531 let drop_flag_not_dropped_already =
532 ICmp(bcx, llvm::IntNE, llbox_as_usize, dropped_pattern, DebugLoc::None);
533 with_cond(bcx, drop_flag_not_dropped_already, |bcx| {
534 let bcx = drop_ty(bcx, llbox, content_ty, DebugLoc::None);
535 trans_exchange_free_ty(bcx, llbox, content_ty, DebugLoc::None)
536 })
537 }
538 }
539 ty::TyStruct(def, _) | ty::TyEnum(def, _) => {
540 match (def.dtor_kind(), skip_dtor) {
541 (ty::TraitDtor(true), false) => {
542 // FIXME(16758) Since the struct is unsized, it is hard to
543 // find the drop flag (which is at the end of the struct).
544 // Lets just ignore the flag and pretend everything will be
545 // OK.
546 if type_is_sized(bcx.tcx(), t) {
547 trans_struct_drop_flag(bcx, t, v0)
548 } else {
549 // Give the user a heads up that we are doing something
550 // stupid and dangerous.
551 bcx.sess().warn(&format!("Ignoring drop flag in destructor for {}\
552 because the struct is unsized. See issue\
553 #16758", t));
554 trans_struct_drop(bcx, t, v0)
555 }
556 }
557 (ty::TraitDtor(false), false) => {
558 trans_struct_drop(bcx, t, v0)
559 }
560 (ty::NoDtor, _) | (_, true) => {
561 // No dtor? Just the default case
562 iter_structural_ty(bcx, v0, t, |bb, vv, tt| drop_ty(bb, vv, tt, DebugLoc::None))
563 }
564 }
565 }
566 ty::TyTrait(..) => {
567 // No support in vtable for distinguishing destroying with
568 // versus without calling Drop::drop. Assert caller is
569 // okay with always calling the Drop impl, if any.
570 assert!(!skip_dtor);
571 let data_ptr = expr::get_dataptr(bcx, v0);
572 let vtable_ptr = Load(bcx, expr::get_meta(bcx, v0));
573 let dtor = Load(bcx, vtable_ptr);
574 Call(bcx,
575 dtor,
576 &[PointerCast(bcx, Load(bcx, data_ptr), Type::i8p(bcx.ccx()))],
577 None,
578 DebugLoc::None);
579 bcx
580 }
581 _ => {
582 if bcx.fcx.type_needs_drop(t) {
583 iter_structural_ty(bcx,
584 v0,
585 t,
586 |bb, vv, tt| drop_ty(bb, vv, tt, DebugLoc::None))
587 } else {
588 bcx
589 }
590 }
591 }
592 }