// src/librustc_mir/util/elaborate_drops.rs (rustc 1.35.0)
1 use std::fmt;
2 use rustc::hir;
3 use rustc::mir::*;
4 use rustc::middle::lang_items;
5 use rustc::traits::Reveal;
6 use rustc::ty::{self, Ty, TyCtxt};
7 use rustc::ty::layout::VariantIdx;
8 use rustc::ty::subst::SubstsRef;
9 use rustc::ty::util::IntTypeExt;
10 use rustc_data_structures::indexed_vec::Idx;
11 use crate::util::patch::MirPatch;
12
13 use std::u32;
14
15 #[derive(Debug, PartialEq, Eq, Copy, Clone)]
16 pub enum DropFlagState {
17 Present, // i.e., initialized
18 Absent, // i.e., deinitialized or "moved"
19 }
20
21 impl DropFlagState {
22 pub fn value(self) -> bool {
23 match self {
24 DropFlagState::Present => true,
25 DropFlagState::Absent => false
26 }
27 }
28 }
29
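/// How a particular drop should be lowered, as determined by drop-flag
/// analysis (summary comment added as a reading aid; the meanings below are
/// inferred from how `elaborate_drop` handles each case):
///
/// - `Dead`: the value is statically known to be uninitialized, so the drop
///   is replaced by a plain goto.
/// - `Static`: the value is statically known to be fully initialized, so an
///   unconditional `Drop` terminator is emitted.
/// - `Conditional`: initialization is only known dynamically, so the drop is
///   guarded by a run-time drop flag.
/// - `Open`: the value may be only partially initialized, so the drop is
///   "opened up" and elaborated field by field.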
30 #[derive(Debug)]
31 pub enum DropStyle {
32 Dead,
33 Static,
34 Conditional,
35 Open,
36 }
37
38 #[derive(Debug)]
39 pub enum DropFlagMode {
40 Shallow,
41 Deep
42 }
43
44 #[derive(Copy, Clone, Debug)]
45 pub enum Unwind {
46 To(BasicBlock),
47 InCleanup
48 }
49
50 impl Unwind {
51 fn is_cleanup(self) -> bool {
52 match self {
53 Unwind::To(..) => false,
54 Unwind::InCleanup => true
55 }
56 }
57
58 fn into_option(self) -> Option<BasicBlock> {
59 match self {
60 Unwind::To(bb) => Some(bb),
61 Unwind::InCleanup => None,
62 }
63 }
64
65 fn map<F>(self, f: F) -> Self where F: FnOnce(BasicBlock) -> BasicBlock {
66 match self {
67 Unwind::To(bb) => Unwind::To(f(bb)),
68 Unwind::InCleanup => Unwind::InCleanup
69 }
70 }
71 }
72
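/// The interface drop elaboration uses to query and rewrite the surrounding
/// MIR body (summary comment added as a reading aid): it exposes the
/// `MirPatch` under construction, reports how a given move path should be
/// dropped (`drop_style`), reads and clears drop flags, and maps a path to
/// the sub-paths of its fields, deref target, enum variants, or array
/// elements. It is implemented by the drop-elaboration pass and by the drop
/// glue shim builder.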
73 pub trait DropElaborator<'a, 'tcx: 'a> : fmt::Debug {
74 type Path : Copy + fmt::Debug;
75
76 fn patch(&mut self) -> &mut MirPatch<'tcx>;
77 fn mir(&self) -> &'a Mir<'tcx>;
78 fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx>;
79 fn param_env(&self) -> ty::ParamEnv<'tcx>;
80
81 fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;
82 fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;
83 fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);
84
85
86 fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path>;
87 fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;
88 fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path>;
89 fn array_subpath(&self, path: Self::Path, index: u32, size: u32) -> Option<Self::Path>;
90 }
91
92 #[derive(Debug)]
93 struct DropCtxt<'l, 'b: 'l, 'tcx: 'b, D>
94 where D : DropElaborator<'b, 'tcx> + 'l
95 {
96 elaborator: &'l mut D,
97
98 source_info: SourceInfo,
99
100 place: &'l Place<'tcx>,
101 path: D::Path,
102 succ: BasicBlock,
103 unwind: Unwind,
104 }
105
106 pub fn elaborate_drop<'b, 'tcx, D>(
107 elaborator: &mut D,
108 source_info: SourceInfo,
109 place: &Place<'tcx>,
110 path: D::Path,
111 succ: BasicBlock,
112 unwind: Unwind,
113 bb: BasicBlock)
114 where D: DropElaborator<'b, 'tcx>
115 {
116 DropCtxt {
117 elaborator, source_info, place, path, succ, unwind
118 }.elaborate_drop(bb)
119 }
120
121 impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
122 where D: DropElaborator<'b, 'tcx>
123 {
124 fn place_ty(&self, place: &Place<'tcx>) -> Ty<'tcx> {
125 place.ty(self.elaborator.mir(), self.tcx()).ty
126 }
127
128 fn tcx(&self) -> TyCtxt<'b, 'tcx, 'tcx> {
129 self.elaborator.tcx()
130 }
131
132 /// This elaborates a single drop instruction, located at `bb`, and
133 /// patches over it.
134 ///
135 /// The elaborated drop checks the drop flags to only drop what
136 /// is initialized.
137 ///
138 /// In addition, the relevant drop flags also need to be cleared
139 /// to avoid double-drops. However, in the middle of a complex
140 /// drop, one must avoid clearing some of the flags before they
141 /// are read, as that would cause a memory leak.
142 ///
143 /// In particular, when dropping an ADT, multiple fields may be
144 /// joined together under the `rest` subpath. They are all controlled
145 /// by the primary drop flag, but only the last rest-field dropped
146 /// should clear it (and it must also not clear anything else).
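///
/// As a source-level illustration (a minimal sketch, not taken from this
/// module), drop flags guard exactly this kind of conditionally-moved value,
/// where the drop emitted at scope end must check whether `s` is still
/// initialized:
///
/// ```ignore (illustrative sketch)
/// fn maybe_consume(c: bool) {
///     let s = String::from("x");
///     if c {
///         drop(s); // moves `s`; elaboration clears its drop flag here
///     }
///     // scope end: `s` is dropped only if its drop flag is still set
/// }
/// ```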
147 //
148 // FIXME: I think we should just control the flags externally,
149 // and then we do not need this machinery.
150 pub fn elaborate_drop<'a>(&mut self, bb: BasicBlock) {
151 debug!("elaborate_drop({:?})", self);
152 let style = self.elaborator.drop_style(self.path, DropFlagMode::Deep);
153 debug!("elaborate_drop({:?}): live - {:?}", self, style);
154 match style {
155 DropStyle::Dead => {
156 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
157 target: self.succ
158 });
159 }
160 DropStyle::Static => {
161 let loc = self.terminator_loc(bb);
162 self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep);
163 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop {
164 location: self.place.clone(),
165 target: self.succ,
166 unwind: self.unwind.into_option(),
167 });
168 }
169 DropStyle::Conditional => {
170 let unwind = self.unwind; // FIXME(#43234)
171 let succ = self.succ;
172 let drop_bb = self.complete_drop(Some(DropFlagMode::Deep), succ, unwind);
173 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
174 target: drop_bb
175 });
176 }
177 DropStyle::Open => {
178 let drop_bb = self.open_drop();
179 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
180 target: drop_bb
181 });
182 }
183 }
184 }
185
186 /// Returns the place and move path for each field of `variant`
187 /// (the move path is `None` if the field is a rest field).
188 fn move_paths_for_fields(&self,
189 base_place: &Place<'tcx>,
190 variant_path: D::Path,
191 variant: &'tcx ty::VariantDef,
192 substs: SubstsRef<'tcx>)
193 -> Vec<(Place<'tcx>, Option<D::Path>)>
194 {
195 variant.fields.iter().enumerate().map(|(i, f)| {
196 let field = Field::new(i);
197 let subpath = self.elaborator.field_subpath(variant_path, field);
198
199 assert_eq!(self.elaborator.param_env().reveal, Reveal::All);
200 let field_ty = self.tcx().normalize_erasing_regions(
201 self.elaborator.param_env(),
202 f.ty(self.tcx(), substs),
203 );
204 (base_place.clone().field(field, field_ty), subpath)
205 }).collect()
206 }
207
208 fn drop_subpath(&mut self,
209 place: &Place<'tcx>,
210 path: Option<D::Path>,
211 succ: BasicBlock,
212 unwind: Unwind)
213 -> BasicBlock
214 {
215 if let Some(path) = path {
216 debug!("drop_subpath: for std field {:?}", place);
217
218 DropCtxt {
219 elaborator: self.elaborator,
220 source_info: self.source_info,
221 path, place, succ, unwind,
222 }.elaborated_drop_block()
223 } else {
224 debug!("drop_subpath: for rest field {:?}", place);
225
226 DropCtxt {
227 elaborator: self.elaborator,
228 source_info: self.source_info,
229 place, succ, unwind,
230 // Using `self.path` here to condition the drop on
231 // our own drop flag.
232 path: self.path
233 }.complete_drop(None, succ, unwind)
234 }
235 }
236
237 /// Creates one-half of the drop ladder for a list of fields, and returns
238 /// the list of steps in it in reverse order, with the first step
239 /// dropping 0 fields and so on.
240 ///
241 /// `unwind_ladder` is such a list of steps in reverse order,
242 /// which is called if the matching step of the drop glue panics.
243 fn drop_halfladder(&mut self,
244 unwind_ladder: &[Unwind],
245 mut succ: BasicBlock,
246 fields: &[(Place<'tcx>, Option<D::Path>)])
247 -> Vec<BasicBlock>
248 {
249 Some(succ).into_iter().chain(
250 fields.iter().rev().zip(unwind_ladder)
251 .map(|(&(ref place, path), &unwind_succ)| {
252 succ = self.drop_subpath(place, path, succ, unwind_succ);
253 succ
254 })
255 ).collect()
256 }
257
258 fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
259 // Clear the "master" drop flag at the end. This is needed
260 // because the "master" drop protects the ADT's discriminant,
261 // which is invalidated after the ADT is dropped.
262 let (succ, unwind) = (self.succ, self.unwind); // FIXME(#43234)
263 (
264 self.drop_flag_reset_block(DropFlagMode::Shallow, succ, unwind),
265 unwind.map(|unwind| {
266 self.drop_flag_reset_block(DropFlagMode::Shallow, unwind, Unwind::InCleanup)
267 })
268 )
269 }
270
271 /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders
272 ///
273 /// For example, with 3 fields, the drop ladder is
274 ///
275 /// .d0:
276 /// ELAB(drop location.0 [target=.d1, unwind=.c1])
277 /// .d1:
278 /// ELAB(drop location.1 [target=.d2, unwind=.c2])
279 /// .d2:
280 /// ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
281 /// .c1:
282 /// ELAB(drop location.1 [target=.c2])
283 /// .c2:
284 /// ELAB(drop location.2 [target=`self.unwind`])
285 ///
286 /// NOTE: this does not clear the master drop flag, so you need
287 /// to point succ/unwind at a `drop_ladder_bottom`.
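///
/// As a rough source-level analogy (illustrative sketch only): dropping a
/// fully-initialized `x: (String, String, String)` behaves like the ladder
/// above, with the cleanup blocks ensuring later fields still run their
/// destructors if an earlier one unwinds:
///
/// ```ignore (illustrative sketch)
/// // conceptually:
/// //   drop(x.0);  // if this unwinds, still drop x.1 and x.2
/// //   drop(x.1);  // if this unwinds, still drop x.2
/// //   drop(x.2);
/// ```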
288 fn drop_ladder<'a>(&mut self,
289 fields: Vec<(Place<'tcx>, Option<D::Path>)>,
290 succ: BasicBlock,
291 unwind: Unwind)
292 -> (BasicBlock, Unwind)
293 {
294 debug!("drop_ladder({:?}, {:?})", self, fields);
295
296 let mut fields = fields;
297 fields.retain(|&(ref place, _)| {
298 self.place_ty(place).needs_drop(self.tcx(), self.elaborator.param_env())
299 });
300
301 debug!("drop_ladder - fields needing drop: {:?}", fields);
302
303 let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
304 let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind {
305 let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields);
306 halfladder.into_iter().map(Unwind::To).collect()
307 } else {
308 unwind_ladder
309 };
310
311 let normal_ladder =
312 self.drop_halfladder(&unwind_ladder, succ, &fields);
313
314 (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
315 }
316
317 fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
318 -> BasicBlock
319 {
320 debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
321
322 let fields = tys.iter().enumerate().map(|(i, &ty)| {
323 (self.place.clone().field(Field::new(i), ty),
324 self.elaborator.field_subpath(self.path, Field::new(i)))
325 }).collect();
326
327 let (succ, unwind) = self.drop_ladder_bottom();
328 self.drop_ladder(fields, succ, unwind).0
329 }
330
331 fn open_drop_for_box<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tcx>)
332 -> BasicBlock
333 {
334 debug!("open_drop_for_box({:?}, {:?}, {:?})", self, adt, substs);
335
336 let interior = self.place.clone().deref();
337 let interior_path = self.elaborator.deref_subpath(self.path);
338
339 let succ = self.succ; // FIXME(#43234)
340 let unwind = self.unwind;
341 let succ = self.box_free_block(adt, substs, succ, unwind);
342 let unwind_succ = self.unwind.map(|unwind| {
343 self.box_free_block(adt, substs, unwind, Unwind::InCleanup)
344 });
345
346 self.drop_subpath(&interior, interior_path, succ, unwind_succ)
347 }
348
349 fn open_drop_for_adt<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tcx>)
350 -> BasicBlock {
351 debug!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt, substs);
352 if adt.variants.len() == 0 {
353 return self.elaborator.patch().new_block(BasicBlockData {
354 statements: vec![],
355 terminator: Some(Terminator {
356 source_info: self.source_info,
357 kind: TerminatorKind::Unreachable
358 }),
359 is_cleanup: self.unwind.is_cleanup()
360 });
361 }
362
363 let skip_contents =
364 adt.is_union() || Some(adt.did) == self.tcx().lang_items().manually_drop();
365 let contents_drop = if skip_contents {
366 (self.succ, self.unwind)
367 } else {
368 self.open_drop_for_adt_contents(adt, substs)
369 };
370
371 if adt.has_dtor(self.tcx()) {
372 self.destructor_call_block(contents_drop)
373 } else {
374 contents_drop.0
375 }
376 }
377
378 fn open_drop_for_adt_contents(&mut self, adt: &'tcx ty::AdtDef,
379 substs: SubstsRef<'tcx>)
380 -> (BasicBlock, Unwind) {
381 let (succ, unwind) = self.drop_ladder_bottom();
382 if !adt.is_enum() {
383 let fields = self.move_paths_for_fields(
384 self.place,
385 self.path,
386 &adt.variants[VariantIdx::new(0)],
387 substs
388 );
389 self.drop_ladder(fields, succ, unwind)
390 } else {
391 self.open_drop_for_multivariant(adt, substs, succ, unwind)
392 }
393 }
394
395 fn open_drop_for_multivariant(&mut self, adt: &'tcx ty::AdtDef,
396 substs: SubstsRef<'tcx>,
397 succ: BasicBlock,
398 unwind: Unwind)
399 -> (BasicBlock, Unwind) {
400 let mut values = Vec::with_capacity(adt.variants.len());
401 let mut normal_blocks = Vec::with_capacity(adt.variants.len());
402 let mut unwind_blocks = if unwind.is_cleanup() {
403 None
404 } else {
405 Some(Vec::with_capacity(adt.variants.len()))
406 };
407
408 let mut have_otherwise = false;
409
410 for (variant_index, discr) in adt.discriminants(self.tcx()) {
411 let subpath = self.elaborator.downcast_subpath(
412 self.path, variant_index);
413 if let Some(variant_path) = subpath {
414 let base_place = self.place.clone().elem(
415 ProjectionElem::Downcast(Some(adt.variants[variant_index].ident.name),
416 variant_index));
417 let fields = self.move_paths_for_fields(
418 &base_place,
419 variant_path,
420 &adt.variants[variant_index],
421 substs);
422 values.push(discr.val);
423 if let Unwind::To(unwind) = unwind {
424 // We can't use the half-ladder from the original
425 // drop ladder, because this breaks the
426 // "funclet can't have 2 successor funclets"
427 // requirement from MSVC:
428 //
429 // switch unwind-switch
430 // / \ / \
431 // v1.0 v2.0 v2.0-unwind v1.0-unwind
432 // | | / |
433 // v1.1-unwind v2.1-unwind |
434 // ^ |
435 // \-------------------------------/
436 //
437 // Create a duplicate half-ladder to avoid that. We
438 // could technically only do this on MSVC, but I
439 // want to minimize the divergence between MSVC
440 // and non-MSVC.
441
442 let unwind_blocks = unwind_blocks.as_mut().unwrap();
443 let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
444 let halfladder =
445 self.drop_halfladder(&unwind_ladder, unwind, &fields);
446 unwind_blocks.push(halfladder.last().cloned().unwrap());
447 }
448 let (normal, _) = self.drop_ladder(fields, succ, unwind);
449 normal_blocks.push(normal);
450 } else {
451 have_otherwise = true;
452 }
453 }
454
455 if have_otherwise {
456 normal_blocks.push(self.drop_block(succ, unwind));
457 if let Unwind::To(unwind) = unwind {
458 unwind_blocks.as_mut().unwrap().push(
459 self.drop_block(unwind, Unwind::InCleanup)
460 );
461 }
462 } else {
463 values.pop();
464 }
465
466 (self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
467 unwind.map(|unwind| {
468 self.adt_switch_block(
469 adt, unwind_blocks.unwrap(), &values, unwind, Unwind::InCleanup
470 )
471 }))
472 }
473
474 fn adt_switch_block(&mut self,
475 adt: &'tcx ty::AdtDef,
476 blocks: Vec<BasicBlock>,
477 values: &[u128],
478 succ: BasicBlock,
479 unwind: Unwind)
480 -> BasicBlock {
481 // If there are multiple variants, then if something
482 // is present within the enum, the discriminant, tracked
483 // by the rest path, must be initialized.
484 //
485 // Additionally, we do not want to switch on the
486 // discriminant after it is freed, because that
487 // way lies only trouble.
488 let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
489 let discr = Place::Base(PlaceBase::Local(self.new_temp(discr_ty)));
490 let discr_rv = Rvalue::Discriminant(self.place.clone());
491 let switch_block = BasicBlockData {
492 statements: vec![self.assign(&discr, discr_rv)],
493 terminator: Some(Terminator {
494 source_info: self.source_info,
495 kind: TerminatorKind::SwitchInt {
496 discr: Operand::Move(discr),
497 switch_ty: discr_ty,
498 values: From::from(values.to_owned()),
499 targets: blocks,
500 }
501 }),
502 is_cleanup: unwind.is_cleanup(),
503 };
504 let switch_block = self.elaborator.patch().new_block(switch_block);
505 self.drop_flag_test_block(switch_block, succ, unwind)
506 }
507
508 fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Unwind))
509 -> BasicBlock
510 {
511 debug!("destructor_call_block({:?}, {:?})", self, succ);
512 let tcx = self.tcx();
513 let drop_trait = tcx.lang_items().drop_trait().unwrap();
514 let drop_fn = tcx.associated_items(drop_trait).next().unwrap();
515 let ty = self.place_ty(self.place);
516 let substs = tcx.mk_substs_trait(ty, &[]);
517
518 let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
519 ty,
520 mutbl: hir::Mutability::MutMutable
521 });
522 let ref_place = self.new_temp(ref_ty);
523 let unit_temp = Place::Base(PlaceBase::Local(self.new_temp(tcx.mk_unit())));
524
525 let result = BasicBlockData {
526 statements: vec![self.assign(
527 &Place::Base(PlaceBase::Local(ref_place)),
528 Rvalue::Ref(tcx.types.re_erased,
529 BorrowKind::Mut { allow_two_phase_borrow: false },
530 self.place.clone())
531 )],
532 terminator: Some(Terminator {
533 kind: TerminatorKind::Call {
534 func: Operand::function_handle(tcx, drop_fn.def_id, substs,
535 self.source_info.span),
536 args: vec![Operand::Move(Place::Base(PlaceBase::Local(ref_place)))],
537 destination: Some((unit_temp, succ)),
538 cleanup: unwind.into_option(),
539 from_hir_call: true,
540 },
541 source_info: self.source_info,
542 }),
543 is_cleanup: unwind.is_cleanup(),
544 };
545 self.elaborator.patch().new_block(result)
546 }
547
548 /// Creates a loop that drops an array:
549 ///
550 ///
551 ///
552 /// loop-block:
553 /// can_go = cur == length_or_end
554 /// if can_go then succ else drop-block
555 /// drop-block:
556 /// if ptr_based {
557 /// ptr = &mut *cur
558 /// cur = cur.offset(1)
559 /// } else {
560 /// ptr = &mut P[cur]
561 /// cur = cur + 1
562 /// }
563 /// drop(ptr)
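///
/// A rough source-level analogy of the `ptr_based` variant (a minimal
/// sketch only, ignoring drop flags, unwinding, and the zero-sized-type
/// case handled elsewhere in this file):
///
/// ```ignore (illustrative sketch)
/// unsafe fn drop_elements<T>(mut cur: *mut T, end: *mut T) {
///     while cur != end {
///         let ptr = cur;
///         cur = cur.offset(1);
///         std::ptr::drop_in_place(ptr);
///     }
/// }
/// ```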
564 fn drop_loop(&mut self,
565 succ: BasicBlock,
566 cur: Local,
567 length_or_end: &Place<'tcx>,
568 ety: Ty<'tcx>,
569 unwind: Unwind,
570 ptr_based: bool)
571 -> BasicBlock
572 {
573 let copy = |place: &Place<'tcx>| Operand::Copy(place.clone());
574 let move_ = |place: &Place<'tcx>| Operand::Move(place.clone());
575 let tcx = self.tcx();
576
577 let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
578 ty: ety,
579 mutbl: hir::Mutability::MutMutable
580 });
581 let ptr = &Place::Base(PlaceBase::Local(self.new_temp(ref_ty)));
582 let can_go = &Place::Base(PlaceBase::Local(self.new_temp(tcx.types.bool)));
583
584 let one = self.constant_usize(1);
585 let (ptr_next, cur_next) = if ptr_based {
586 (Rvalue::Ref(
587 tcx.types.re_erased,
588 BorrowKind::Mut { allow_two_phase_borrow: false },
589 Place::Projection(Box::new(Projection {
590 base: Place::Base(PlaceBase::Local(cur)),
591 elem: ProjectionElem::Deref,
592 }))
593 ),
594 Rvalue::BinaryOp(BinOp::Offset, copy(&Place::Base(PlaceBase::Local(cur))), one))
595 } else {
596 (Rvalue::Ref(
597 tcx.types.re_erased,
598 BorrowKind::Mut { allow_two_phase_borrow: false },
599 self.place.clone().index(cur)),
600 Rvalue::BinaryOp(BinOp::Add, copy(&Place::Base(PlaceBase::Local(cur))), one))
601 };
602
603 let drop_block = BasicBlockData {
604 statements: vec![
605 self.assign(ptr, ptr_next),
606 self.assign(&Place::Base(PlaceBase::Local(cur)), cur_next)
607 ],
608 is_cleanup: unwind.is_cleanup(),
609 terminator: Some(Terminator {
610 source_info: self.source_info,
611 // this gets overwritten by drop elaboration.
612 kind: TerminatorKind::Unreachable,
613 })
614 };
615 let drop_block = self.elaborator.patch().new_block(drop_block);
616
617 let loop_block = BasicBlockData {
618 statements: vec![
619 self.assign(can_go, Rvalue::BinaryOp(BinOp::Eq,
620 copy(&Place::Base(PlaceBase::Local(cur))),
621 copy(length_or_end)))
622 ],
623 is_cleanup: unwind.is_cleanup(),
624 terminator: Some(Terminator {
625 source_info: self.source_info,
626 kind: TerminatorKind::if_(tcx, move_(can_go), succ, drop_block)
627 })
628 };
629 let loop_block = self.elaborator.patch().new_block(loop_block);
630
631 self.elaborator.patch().patch_terminator(drop_block, TerminatorKind::Drop {
632 location: ptr.clone().deref(),
633 target: loop_block,
634 unwind: unwind.into_option()
635 });
636
637 loop_block
638 }
639
640 fn open_drop_for_array(&mut self, ety: Ty<'tcx>, opt_size: Option<u64>) -> BasicBlock {
641 debug!("open_drop_for_array({:?}, {:?})", ety, opt_size);
642
643 // if size_of::<ety>() == 0 {
644 // index_based_loop
645 // } else {
646 // ptr_based_loop
647 // }
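        // (A pointer-based loop cannot terminate for zero-sized element
        // types, since offsetting a pointer by a zero-sized element does not
        // advance its address; an index-based loop is used in that case.)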
648
649 if let Some(size) = opt_size {
650 assert!(size <= (u32::MAX as u64),
651 "move out check doesn't implemented for array bigger then u32");
652 let size = size as u32;
653 let fields: Vec<(Place<'tcx>, Option<D::Path>)> = (0..size).map(|i| {
654 (self.place.clone().elem(ProjectionElem::ConstantIndex{
655 offset: i,
656 min_length: size,
657 from_end: false
658 }),
659 self.elaborator.array_subpath(self.path, i, size))
660 }).collect();
661
662 if fields.iter().any(|(_,path)| path.is_some()) {
663 let (succ, unwind) = self.drop_ladder_bottom();
664 return self.drop_ladder(fields, succ, unwind).0
665 }
666 }
667
668 let move_ = |place: &Place<'tcx>| Operand::Move(place.clone());
669 let tcx = self.tcx();
670 let size = &Place::Base(PlaceBase::Local(self.new_temp(tcx.types.usize)));
671 let size_is_zero = &Place::Base(PlaceBase::Local(self.new_temp(tcx.types.bool)));
672 let base_block = BasicBlockData {
673 statements: vec![
674 self.assign(size, Rvalue::NullaryOp(NullOp::SizeOf, ety)),
675 self.assign(size_is_zero, Rvalue::BinaryOp(BinOp::Eq,
676 move_(size),
677 self.constant_usize(0)))
678 ],
679 is_cleanup: self.unwind.is_cleanup(),
680 terminator: Some(Terminator {
681 source_info: self.source_info,
682 kind: TerminatorKind::if_(
683 tcx,
684 move_(size_is_zero),
685 self.drop_loop_pair(ety, false),
686 self.drop_loop_pair(ety, true)
687 )
688 })
689 };
690 self.elaborator.patch().new_block(base_block)
691 }
692
693     // Creates a pair of drop loops over `place` that drop its contents,
694     // even in the case of a panic. If `ptr_based`, creates a pointer loop,
695     // otherwise an index loop.
696 fn drop_loop_pair(&mut self, ety: Ty<'tcx>, ptr_based: bool) -> BasicBlock {
697 debug!("drop_loop_pair({:?}, {:?})", ety, ptr_based);
698 let tcx = self.tcx();
699 let iter_ty = if ptr_based {
700 tcx.mk_mut_ptr(ety)
701 } else {
702 tcx.types.usize
703 };
704
705 let cur = self.new_temp(iter_ty);
706 let length = Place::Base(PlaceBase::Local(self.new_temp(tcx.types.usize)));
707 let length_or_end = if ptr_based {
708 // FIXME check if we want to make it return a `Place` directly
709 // if all use sites want a `Place::Base` anyway.
710 let temp = self.new_temp(iter_ty);
711 Place::Base(PlaceBase::Local(temp))
712 } else {
713 length.clone()
714 };
715
716 let unwind = self.unwind.map(|unwind| {
717 self.drop_loop(unwind,
718 cur,
719 &length_or_end,
720 ety,
721 Unwind::InCleanup,
722 ptr_based)
723 });
724
725 let succ = self.succ; // FIXME(#43234)
726 let loop_block = self.drop_loop(
727 succ,
728 cur,
729 &length_or_end,
730 ety,
731 unwind,
732 ptr_based);
733
734 let cur = Place::Base(PlaceBase::Local(cur));
735 let zero = self.constant_usize(0);
736 let mut drop_block_stmts = vec![];
737 drop_block_stmts.push(self.assign(&length, Rvalue::Len(self.place.clone())));
738 if ptr_based {
739 let tmp_ty = tcx.mk_mut_ptr(self.place_ty(self.place));
740 let tmp = Place::Base(PlaceBase::Local(self.new_temp(tmp_ty)));
741 // tmp = &mut P;
742 // cur = tmp as *mut T;
743 // end = Offset(cur, len);
744 drop_block_stmts.push(self.assign(&tmp, Rvalue::Ref(
745 tcx.types.re_erased,
746 BorrowKind::Mut { allow_two_phase_borrow: false },
747 self.place.clone()
748 )));
749 drop_block_stmts.push(self.assign(&cur, Rvalue::Cast(
750 CastKind::Misc, Operand::Move(tmp), iter_ty
751 )));
752 drop_block_stmts.push(self.assign(&length_or_end,
753 Rvalue::BinaryOp(BinOp::Offset,
754 Operand::Copy(cur), Operand::Move(length)
755 )));
756 } else {
757 // index = 0 (length already pushed)
758 drop_block_stmts.push(self.assign(&cur, Rvalue::Use(zero)));
759 }
760 let drop_block = self.elaborator.patch().new_block(BasicBlockData {
761 statements: drop_block_stmts,
762 is_cleanup: unwind.is_cleanup(),
763 terminator: Some(Terminator {
764 source_info: self.source_info,
765 kind: TerminatorKind::Goto { target: loop_block }
766 })
767 });
768
769 // FIXME(#34708): handle partially-dropped array/slice elements.
770 let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
771 self.drop_flag_test_block(reset_block, succ, unwind)
772 }
773
774     /// The slow path: creates an "open", elaborated drop for a type
775     /// which is only partially moved out of, and patches `bb` to jump
776     /// to it. This must not be called on ADTs with a destructor,
777     /// as those can't be moved out of, except for `Box<T>`, which is
778     /// special-cased.
779     ///
780     /// This creates a "drop ladder" that drops the needed fields of the
781     /// ADT, both in the success case and if one of the destructors fails.
782 fn open_drop<'a>(&mut self) -> BasicBlock {
783 let ty = self.place_ty(self.place);
784 match ty.sty {
785 ty::Closure(def_id, substs) => {
786 let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
787 self.open_drop_for_tuple(&tys)
788 }
789             // Note that `elaborate_drops` only drops the upvars of a generator,
790             // and this is ok because `open_drop` here can only be reached
791             // within that generator's own resume function.
792             // This should only happen for the self argument of the resume function.
793             // It effectively only contains upvars until the generator transformation runs.
794             // See librustc_mir/transform/generator.rs for more details.
795 ty::Generator(def_id, substs, _) => {
796 let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
797 self.open_drop_for_tuple(&tys)
798 }
799 ty::Tuple(tys) => {
800 self.open_drop_for_tuple(tys)
801 }
802 ty::Adt(def, substs) => {
803 if def.is_box() {
804 self.open_drop_for_box(def, substs)
805 } else {
806 self.open_drop_for_adt(def, substs)
807 }
808 }
809 ty::Dynamic(..) => {
810 let unwind = self.unwind; // FIXME(#43234)
811 let succ = self.succ;
812 self.complete_drop(Some(DropFlagMode::Deep), succ, unwind)
813 }
814 ty::Array(ety, size) => {
815 let size = size.assert_usize(self.tcx());
816 self.open_drop_for_array(ety, size)
817 },
818 ty::Slice(ety) => self.open_drop_for_array(ety, None),
819
820 _ => bug!("open drop from non-ADT `{:?}`", ty)
821 }
822 }
823
824     /// Returns a basic block that drops a place using the context
825     /// and path in `c`. If `mode` is `Some(..)`, also clears the drop
826     /// flag according to it.
827 ///
828 /// if FLAG(self.path)
829 /// if let Some(mode) = mode: FLAG(self.path)[mode] = false
830 /// drop(self.place)
831 fn complete_drop<'a>(&mut self,
832 drop_mode: Option<DropFlagMode>,
833 succ: BasicBlock,
834 unwind: Unwind) -> BasicBlock
835 {
836 debug!("complete_drop({:?},{:?})", self, drop_mode);
837
838 let drop_block = self.drop_block(succ, unwind);
839 let drop_block = if let Some(mode) = drop_mode {
840 self.drop_flag_reset_block(mode, drop_block, unwind)
841 } else {
842 drop_block
843 };
844
845 self.drop_flag_test_block(drop_block, succ, unwind)
846 }
847
848 fn drop_flag_reset_block(&mut self,
849 mode: DropFlagMode,
850 succ: BasicBlock,
851 unwind: Unwind) -> BasicBlock
852 {
853 debug!("drop_flag_reset_block({:?},{:?})", self, mode);
854
855 let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
856 let block_start = Location { block: block, statement_index: 0 };
857 self.elaborator.clear_drop_flag(block_start, self.path, mode);
858 block
859 }
860
861 fn elaborated_drop_block<'a>(&mut self) -> BasicBlock {
862 debug!("elaborated_drop_block({:?})", self);
863 let unwind = self.unwind; // FIXME(#43234)
864 let succ = self.succ;
865 let blk = self.drop_block(succ, unwind);
866 self.elaborate_drop(blk);
867 blk
868 }
869
870 fn box_free_block<'a>(
871 &mut self,
872 adt: &'tcx ty::AdtDef,
873 substs: SubstsRef<'tcx>,
874 target: BasicBlock,
875 unwind: Unwind,
876 ) -> BasicBlock {
877 let block = self.unelaborated_free_block(adt, substs, target, unwind);
878 self.drop_flag_test_block(block, target, unwind)
879 }
880
881 fn unelaborated_free_block<'a>(
882 &mut self,
883 adt: &'tcx ty::AdtDef,
884 substs: SubstsRef<'tcx>,
885 target: BasicBlock,
886 unwind: Unwind
887 ) -> BasicBlock {
888 let tcx = self.tcx();
889 let unit_temp = Place::Base(PlaceBase::Local(self.new_temp(tcx.mk_unit())));
890 let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
891 let args = adt.variants[VariantIdx::new(0)].fields.iter().enumerate().map(|(i, f)| {
892 let field = Field::new(i);
893 let field_ty = f.ty(self.tcx(), substs);
894 Operand::Move(self.place.clone().field(field, field_ty))
895 }).collect();
896
897 let call = TerminatorKind::Call {
898 func: Operand::function_handle(tcx, free_func, substs, self.source_info.span),
899 args: args,
900 destination: Some((unit_temp, target)),
901 cleanup: None,
902 from_hir_call: false,
903 }; // FIXME(#43234)
904 let free_block = self.new_block(unwind, call);
905
906 let block_start = Location { block: free_block, statement_index: 0 };
907 self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
908 free_block
909 }
910
911 fn drop_block<'a>(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
912 let block = TerminatorKind::Drop {
913 location: self.place.clone(),
914 target,
915 unwind: unwind.into_option()
916 };
917 self.new_block(unwind, block)
918 }
919
920 fn drop_flag_test_block(&mut self,
921 on_set: BasicBlock,
922 on_unset: BasicBlock,
923 unwind: Unwind)
924 -> BasicBlock
925 {
926 let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
927 debug!("drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
928 self, on_set, on_unset, unwind, style);
929
930 match style {
931 DropStyle::Dead => on_unset,
932 DropStyle::Static => on_set,
933 DropStyle::Conditional | DropStyle::Open => {
934 let flag = self.elaborator.get_drop_flag(self.path).unwrap();
935 let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset);
936 self.new_block(unwind, term)
937 }
938 }
939 }
940
941 fn new_block<'a>(&mut self,
942 unwind: Unwind,
943 k: TerminatorKind<'tcx>)
944 -> BasicBlock
945 {
946 self.elaborator.patch().new_block(BasicBlockData {
947 statements: vec![],
948 terminator: Some(Terminator {
949 source_info: self.source_info, kind: k
950 }),
951 is_cleanup: unwind.is_cleanup()
952 })
953 }
954
955 fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
956 self.elaborator.patch().new_temp(ty, self.source_info.span)
957 }
958
959 fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
960 let mir = self.elaborator.mir();
961 self.elaborator.patch().terminator_loc(mir, bb)
962 }
963
964 fn constant_usize(&self, val: u16) -> Operand<'tcx> {
965 Operand::Constant(box Constant {
966 span: self.source_info.span,
967 ty: self.tcx().types.usize,
968 user_ty: None,
969 literal: self.tcx().mk_const(
970 ty::Const::from_usize(self.tcx(), val.into())
971 ),
972 })
973 }
974
975 fn assign(&self, lhs: &Place<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
976 Statement {
977 source_info: self.source_info,
978 kind: StatementKind::Assign(lhs.clone(), box rhs)
979 }
980 }
981 }