// src/librustc_mir/util/elaborate_drops.rs (upstream rustc 1.28.0~beta.14)
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::fmt;
use rustc::hir;
use rustc::mir::*;
use rustc::middle::lang_items;
use rustc::traits::Reveal;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::subst::Substs;
use rustc::ty::util::IntTypeExt;
use rustc_data_structures::indexed_vec::Idx;
use util::patch::MirPatch;

use std::u32;

#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum DropFlagState {
    Present, // i.e. initialized
    Absent, // i.e. deinitialized or "moved"
}

impl DropFlagState {
    pub fn value(self) -> bool {
        match self {
            DropFlagState::Present => true,
            DropFlagState::Absent => false
        }
    }
}

#[derive(Debug)]
pub enum DropStyle {
    /// The drop is known to be a no-op here (the value was never
    /// initialized, or has already been moved out), so it becomes a goto.
    Dead,
    /// The value is known to be fully initialized here, so it is
    /// dropped unconditionally.
    Static,
    /// Whether the value is initialized is only known at runtime; the
    /// drop of the whole value is guarded by its drop flag.
    Conditional,
    /// The value may be only partially initialized, so the drop has to
    /// be "opened up" into drops of its individual fields.
    Open,
}

#[derive(Debug)]
pub enum DropFlagMode {
    /// Affects only the drop flag of the path itself.
    Shallow,
    /// Affects the drop flags of the path and all descendant paths.
    Deep
}

/// Whether and where a drop should unwind to on panic: either to an
/// explicit cleanup block, or nowhere, because the code being built is
/// itself already part of a cleanup path.
#[derive(Copy, Clone, Debug)]
pub enum Unwind {
    To(BasicBlock),
    InCleanup
}

impl Unwind {
    fn is_cleanup(self) -> bool {
        match self {
            Unwind::To(..) => false,
            Unwind::InCleanup => true
        }
    }

    fn into_option(self) -> Option<BasicBlock> {
        match self {
            Unwind::To(bb) => Some(bb),
            Unwind::InCleanup => None,
        }
    }

    fn map<F>(self, f: F) -> Self where F: FnOnce(BasicBlock) -> BasicBlock {
        match self {
            Unwind::To(bb) => Unwind::To(f(bb)),
            Unwind::InCleanup => Unwind::InCleanup
        }
    }
}

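/// The interface drop elaboration uses to query drop-flag state and
/// move-path information for the place being dropped, and to emit new
/// blocks, statements and temporaries through a `MirPatch`. It is
/// implemented by the callers of `elaborate_drop` (for example the
/// elaborate-drops MIR pass and the drop-glue shim builder).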
pub trait DropElaborator<'a, 'tcx: 'a> : fmt::Debug {
    type Path : Copy + fmt::Debug;

    fn patch(&mut self) -> &mut MirPatch<'tcx>;
    fn mir(&self) -> &'a Mir<'tcx>;
    fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx>;
    fn param_env(&self) -> ty::ParamEnv<'tcx>;

    fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;
    fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;
    fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);

    fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path>;
    fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;
    fn downcast_subpath(&self, path: Self::Path, variant: usize) -> Option<Self::Path>;
    fn array_subpath(&self, path: Self::Path, index: u32, size: u32) -> Option<Self::Path>;
}

#[derive(Debug)]
struct DropCtxt<'l, 'b: 'l, 'tcx: 'b, D>
    where D : DropElaborator<'b, 'tcx> + 'l
{
    elaborator: &'l mut D,

    source_info: SourceInfo,

    place: &'l Place<'tcx>,
    path: D::Path,
    succ: BasicBlock,
    unwind: Unwind,
}

pub fn elaborate_drop<'b, 'tcx, D>(
    elaborator: &mut D,
    source_info: SourceInfo,
    place: &Place<'tcx>,
    path: D::Path,
    succ: BasicBlock,
    unwind: Unwind,
    bb: BasicBlock)
    where D: DropElaborator<'b, 'tcx>
{
    DropCtxt {
        elaborator, source_info, place, path, succ, unwind
    }.elaborate_drop(bb)
}

impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
    where D: DropElaborator<'b, 'tcx>
{
    fn place_ty(&self, place: &Place<'tcx>) -> Ty<'tcx> {
        place.ty(self.elaborator.mir(), self.tcx()).to_ty(self.tcx())
    }

    fn tcx(&self) -> TyCtxt<'b, 'tcx, 'tcx> {
        self.elaborator.tcx()
    }

    /// This elaborates a single drop instruction, located at `bb`, and
    /// patches over it.
    ///
    /// The elaborated drop checks the drop flags to only drop what
    /// is initialized.
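    ///
    /// For a conditional drop (`DropStyle::Conditional`), for example,
    /// the single drop roughly expands to the following shape
    /// (a sketch; block names are illustrative):
    ///
    ///    test-block:
    ///        if FLAG(self.path) then clear-block else self.succ
    ///    clear-block:
    ///        FLAG(self.path) = false   // Deep: child flags as well
    ///        goto -> drop-block
    ///    drop-block:
    ///        Drop(self.place) [target=self.succ, unwind=self.unwind]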
    ///
    /// In addition, the relevant drop flags also need to be cleared
    /// to avoid double-drops. However, in the middle of a complex
    /// drop, one must avoid clearing some of the flags before they
    /// are read, as that would cause a memory leak.
    ///
    /// In particular, when dropping an ADT, multiple fields may be
    /// joined together under the `rest` subpath. They are all controlled
    /// by the primary drop flag, but only the last rest-field dropped
    /// should clear it (and it must also not clear anything else).
    ///
    /// FIXME: I think we should just control the flags externally,
    /// and then we would not need this machinery.
    pub fn elaborate_drop<'a>(&mut self, bb: BasicBlock) {
        debug!("elaborate_drop({:?})", self);
        let style = self.elaborator.drop_style(self.path, DropFlagMode::Deep);
        debug!("elaborate_drop({:?}): live - {:?}", self, style);
        match style {
            DropStyle::Dead => {
                self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
                    target: self.succ
                });
            }
            DropStyle::Static => {
                let loc = self.terminator_loc(bb);
                self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep);
                self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop {
                    location: self.place.clone(),
                    target: self.succ,
                    unwind: self.unwind.into_option(),
                });
            }
            DropStyle::Conditional => {
                let unwind = self.unwind; // FIXME(#43234)
                let succ = self.succ;
                let drop_bb = self.complete_drop(Some(DropFlagMode::Deep), succ, unwind);
                self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
                    target: drop_bb
                });
            }
            DropStyle::Open => {
                let drop_bb = self.open_drop();
                self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
                    target: drop_bb
                });
            }
        }
    }

    /// Returns the place and move path for each field of `variant`
    /// (the move path is `None` if the field is a rest field).
    fn move_paths_for_fields(&self,
                             base_place: &Place<'tcx>,
                             variant_path: D::Path,
                             variant: &'tcx ty::VariantDef,
                             substs: &'tcx Substs<'tcx>)
                             -> Vec<(Place<'tcx>, Option<D::Path>)>
    {
        variant.fields.iter().enumerate().map(|(i, f)| {
            let field = Field::new(i);
            let subpath = self.elaborator.field_subpath(variant_path, field);

            assert_eq!(self.elaborator.param_env().reveal, Reveal::All);
            let field_ty = self.tcx().normalize_erasing_regions(
                self.elaborator.param_env(),
                f.ty(self.tcx(), substs),
            );
            (base_place.clone().field(field, field_ty), subpath)
        }).collect()
    }

    fn drop_subpath(&mut self,
                    place: &Place<'tcx>,
                    path: Option<D::Path>,
                    succ: BasicBlock,
                    unwind: Unwind)
                    -> BasicBlock
    {
        if let Some(path) = path {
            debug!("drop_subpath: for std field {:?}", place);

            DropCtxt {
                elaborator: self.elaborator,
                source_info: self.source_info,
                path, place, succ, unwind,
            }.elaborated_drop_block()
        } else {
            debug!("drop_subpath: for rest field {:?}", place);

            DropCtxt {
                elaborator: self.elaborator,
                source_info: self.source_info,
                place, succ, unwind,
                // Using `self.path` here to condition the drop on
                // our own drop flag.
                path: self.path
            }.complete_drop(None, succ, unwind)
        }
    }

    /// Creates one-half of the drop ladder for a list of fields, and
    /// returns the list of steps in it in reverse order, with the first
    /// step dropping 0 fields and so on.
    ///
    /// `unwind_ladder` is such a list of steps in reverse order,
    /// which is jumped to if the matching step of the drop glue panics.
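    ///
    /// For example, with fields `[f0, f1]` and successor `succ`, the
    /// returned blocks are, schematically:
    ///
    ///    [succ, drop(f1) -> succ, drop(f0) -> drop(f1)]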
    fn drop_halfladder(&mut self,
                       unwind_ladder: &[Unwind],
                       mut succ: BasicBlock,
                       fields: &[(Place<'tcx>, Option<D::Path>)])
                       -> Vec<BasicBlock>
    {
        Some(succ).into_iter().chain(
            fields.iter().rev().zip(unwind_ladder)
                .map(|(&(ref place, path), &unwind_succ)| {
                    succ = self.drop_subpath(place, path, succ, unwind_succ);
                    succ
                })
        ).collect()
    }

    fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
        // Clear the "master" drop flag at the end. This is needed
        // because the "master" drop protects the ADT's discriminant,
        // which is invalidated after the ADT is dropped.
        let (succ, unwind) = (self.succ, self.unwind); // FIXME(#43234)
        (
            self.drop_flag_reset_block(DropFlagMode::Shallow, succ, unwind),
            unwind.map(|unwind| {
                self.drop_flag_reset_block(DropFlagMode::Shallow, unwind, Unwind::InCleanup)
            })
        )
    }

    /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders.
    ///
    /// For example, with 3 fields, the drop ladder is
    ///
    /// .d0:
    ///     ELAB(drop location.0 [target=.d1, unwind=.c1])
    /// .d1:
    ///     ELAB(drop location.1 [target=.d2, unwind=.c2])
    /// .d2:
    ///     ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
    /// .c1:
    ///     ELAB(drop location.1 [target=.c2])
    /// .c2:
    ///     ELAB(drop location.2 [target=`self.unwind`])
    ///
    /// NOTE: this does not clear the master drop flag, so you need
    /// to point `succ`/`unwind` at a `drop_ladder_bottom`.
    fn drop_ladder<'a>(&mut self,
                       fields: Vec<(Place<'tcx>, Option<D::Path>)>,
                       succ: BasicBlock,
                       unwind: Unwind)
                       -> (BasicBlock, Unwind)
    {
        debug!("drop_ladder({:?}, {:?})", self, fields);

        let mut fields = fields;
        fields.retain(|&(ref place, _)| {
            self.place_ty(place).needs_drop(self.tcx(), self.elaborator.param_env())
        });

        debug!("drop_ladder - fields needing drop: {:?}", fields);

        let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
        let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind {
            let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields);
            halfladder.into_iter().map(Unwind::To).collect()
        } else {
            unwind_ladder
        };

        let normal_ladder =
            self.drop_halfladder(&unwind_ladder, succ, &fields);

        (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
    }

    fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
                               -> BasicBlock
    {
        debug!("open_drop_for_tuple({:?}, {:?})", self, tys);

        let fields = tys.iter().enumerate().map(|(i, &ty)| {
            (self.place.clone().field(Field::new(i), ty),
             self.elaborator.field_subpath(self.path, Field::new(i)))
        }).collect();

        let (succ, unwind) = self.drop_ladder_bottom();
        self.drop_ladder(fields, succ, unwind).0
    }

    fn open_drop_for_box<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
                             -> BasicBlock
    {
        debug!("open_drop_for_box({:?}, {:?}, {:?})", self, adt, substs);

        let interior = self.place.clone().deref();
        let interior_path = self.elaborator.deref_subpath(self.path);

        let succ = self.succ; // FIXME(#43234)
        let unwind = self.unwind;
        let succ = self.box_free_block(adt, substs, succ, unwind);
        let unwind_succ = self.unwind.map(|unwind| {
            self.box_free_block(adt, substs, unwind, Unwind::InCleanup)
        });

        self.drop_subpath(&interior, interior_path, succ, unwind_succ)
    }

    fn open_drop_for_adt<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
                             -> BasicBlock {
        debug!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt, substs);
        if adt.variants.len() == 0 {
            return self.elaborator.patch().new_block(BasicBlockData {
                statements: vec![],
                terminator: Some(Terminator {
                    source_info: self.source_info,
                    kind: TerminatorKind::Unreachable
                }),
                is_cleanup: self.unwind.is_cleanup()
            });
        }

        let contents_drop = if adt.is_union() {
            (self.succ, self.unwind)
        } else {
            self.open_drop_for_adt_contents(adt, substs)
        };

        if adt.has_dtor(self.tcx()) {
            self.destructor_call_block(contents_drop)
        } else {
            contents_drop.0
        }
    }

    fn open_drop_for_adt_contents(&mut self, adt: &'tcx ty::AdtDef,
                                  substs: &'tcx Substs<'tcx>)
                                  -> (BasicBlock, Unwind) {
        let (succ, unwind) = self.drop_ladder_bottom();
        if !adt.is_enum() {
            let fields = self.move_paths_for_fields(
                self.place,
                self.path,
                &adt.variants[0],
                substs
            );
            self.drop_ladder(fields, succ, unwind)
        } else {
            self.open_drop_for_multivariant(adt, substs, succ, unwind)
        }
    }

    fn open_drop_for_multivariant(&mut self, adt: &'tcx ty::AdtDef,
                                  substs: &'tcx Substs<'tcx>,
                                  succ: BasicBlock,
                                  unwind: Unwind)
                                  -> (BasicBlock, Unwind) {
        let mut values = Vec::with_capacity(adt.variants.len());
        let mut normal_blocks = Vec::with_capacity(adt.variants.len());
        let mut unwind_blocks = if unwind.is_cleanup() {
            None
        } else {
            Some(Vec::with_capacity(adt.variants.len()))
        };

        let mut have_otherwise = false;

        for (variant_index, discr) in adt.discriminants(self.tcx()).enumerate() {
            let subpath = self.elaborator.downcast_subpath(
                self.path, variant_index);
            if let Some(variant_path) = subpath {
                let base_place = self.place.clone().elem(
                    ProjectionElem::Downcast(adt, variant_index)
                );
                let fields = self.move_paths_for_fields(
                    &base_place,
                    variant_path,
                    &adt.variants[variant_index],
                    substs);
                values.push(discr.val);
                if let Unwind::To(unwind) = unwind {
                    // We can't use the half-ladder from the original
                    // drop ladder, because this breaks the
                    // "funclet can't have 2 successor funclets"
                    // requirement from MSVC:
                    //
                    //           switch       unwind-switch
                    //          /      \         /        \
                    //         v1.0    v2.0  v2.0-unwind  v1.0-unwind
                    //         |        |      /             |
                    //    v1.1-unwind  v2.1-unwind           |
                    //      ^                                |
                    //       \------------------------------/
                    //
                    // Create a duplicate half-ladder to avoid that. We
                    // could technically only do this on MSVC, but I
                    // want to minimize the divergence between MSVC
                    // and non-MSVC.

                    let unwind_blocks = unwind_blocks.as_mut().unwrap();
                    let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
                    let halfladder =
                        self.drop_halfladder(&unwind_ladder, unwind, &fields);
                    unwind_blocks.push(halfladder.last().cloned().unwrap());
                }
                let (normal, _) = self.drop_ladder(fields, succ, unwind);
                normal_blocks.push(normal);
            } else {
                have_otherwise = true;
            }
        }

        if have_otherwise {
            normal_blocks.push(self.drop_block(succ, unwind));
            if let Unwind::To(unwind) = unwind {
                unwind_blocks.as_mut().unwrap().push(
                    self.drop_block(unwind, Unwind::InCleanup)
                );
            }
        } else {
            values.pop();
        }

        (self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
         unwind.map(|unwind| {
             self.adt_switch_block(
                 adt, unwind_blocks.unwrap(), &values, unwind, Unwind::InCleanup
             )
         }))
    }

    fn adt_switch_block(&mut self,
                        adt: &'tcx ty::AdtDef,
                        blocks: Vec<BasicBlock>,
                        values: &[u128],
                        succ: BasicBlock,
                        unwind: Unwind)
                        -> BasicBlock {
        // If there are multiple variants, then if something
        // is present within the enum, the discriminant, tracked
        // by the rest path, must be initialized.
        //
        // Additionally, we do not want to switch on the
        // discriminant after it is freed, because that
        // way lies only trouble.
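        //
        // Schematically, the result looks like this (a sketch; block
        // names are illustrative, the guard comes from
        // `drop_flag_test_block`):
        //
        //    flag-test:
        //        if FLAG(self.path) then switch-block else succ
        //    switch-block:
        //        _d = discriminant(self.place)
        //        switchInt(move _d) -> [values[0]: blocks[0], ...,
        //                               otherwise: blocks[last]]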
        let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
        let discr = Place::Local(self.new_temp(discr_ty));
        let discr_rv = Rvalue::Discriminant(self.place.clone());
        let switch_block = BasicBlockData {
            statements: vec![self.assign(&discr, discr_rv)],
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::SwitchInt {
                    discr: Operand::Move(discr),
                    switch_ty: discr_ty,
                    values: From::from(values.to_owned()),
                    targets: blocks,
                }
            }),
            is_cleanup: unwind.is_cleanup(),
        };
        let switch_block = self.elaborator.patch().new_block(switch_block);
        self.drop_flag_test_block(switch_block, succ, unwind)
    }

    fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Unwind))
                                 -> BasicBlock
    {
        debug!("destructor_call_block({:?}, {:?})", self, succ);
        let tcx = self.tcx();
        let drop_trait = tcx.lang_items().drop_trait().unwrap();
        let drop_fn = tcx.associated_items(drop_trait).next().unwrap();
        let ty = self.place_ty(self.place);
        let substs = tcx.mk_substs_trait(ty, &[]);

        let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
            ty,
            mutbl: hir::Mutability::MutMutable
        });
        let ref_place = self.new_temp(ref_ty);
        let unit_temp = Place::Local(self.new_temp(tcx.mk_nil()));

        let result = BasicBlockData {
            statements: vec![self.assign(
                &Place::Local(ref_place),
                Rvalue::Ref(tcx.types.re_erased,
                            BorrowKind::Mut { allow_two_phase_borrow: false },
                            self.place.clone())
            )],
            terminator: Some(Terminator {
                kind: TerminatorKind::Call {
                    func: Operand::function_handle(tcx, drop_fn.def_id, substs,
                                                   self.source_info.span),
                    args: vec![Operand::Move(Place::Local(ref_place))],
                    destination: Some((unit_temp, succ)),
                    cleanup: unwind.into_option(),
                },
                source_info: self.source_info
            }),
            is_cleanup: unwind.is_cleanup(),
        };
        self.elaborator.patch().new_block(result)
    }

    /// Creates a loop that drops an array:
    ///
    /// loop-block:
    ///     can_go = cur == length_or_end
    ///     if can_go then succ else drop-block
    /// drop-block:
    ///     if ptr_based {
    ///         ptr = cur
    ///         cur = cur.offset(1)
    ///     } else {
    ///         ptr = &mut P[cur]
    ///         cur = cur + 1
    ///     }
    ///     drop(ptr)
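    ///
    /// Returns the loop-block; the caller is expected to initialize
    /// `cur` (and, in the pointer-based case, `length_or_end`) before
    /// branching to it, as `drop_loop_pair` below does.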
    fn drop_loop(&mut self,
                 succ: BasicBlock,
                 cur: Local,
                 length_or_end: &Place<'tcx>,
                 ety: Ty<'tcx>,
                 unwind: Unwind,
                 ptr_based: bool)
                 -> BasicBlock
    {
        let copy = |place: &Place<'tcx>| Operand::Copy(place.clone());
        let move_ = |place: &Place<'tcx>| Operand::Move(place.clone());
        let tcx = self.tcx();

        let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
            ty: ety,
            mutbl: hir::Mutability::MutMutable
        });
        let ptr = &Place::Local(self.new_temp(ref_ty));
        let can_go = &Place::Local(self.new_temp(tcx.types.bool));

        let one = self.constant_usize(1);
        let (ptr_next, cur_next) = if ptr_based {
            (Rvalue::Use(copy(&Place::Local(cur))),
             Rvalue::BinaryOp(BinOp::Offset, copy(&Place::Local(cur)), one))
        } else {
            (Rvalue::Ref(
                 tcx.types.re_erased,
                 BorrowKind::Mut { allow_two_phase_borrow: false },
                 self.place.clone().index(cur)),
             Rvalue::BinaryOp(BinOp::Add, copy(&Place::Local(cur)), one))
        };

        let drop_block = BasicBlockData {
            statements: vec![
                self.assign(ptr, ptr_next),
                self.assign(&Place::Local(cur), cur_next)
            ],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                // this gets overwritten by drop elaboration.
                kind: TerminatorKind::Unreachable,
            })
        };
        let drop_block = self.elaborator.patch().new_block(drop_block);

        let loop_block = BasicBlockData {
            statements: vec![
                self.assign(can_go, Rvalue::BinaryOp(BinOp::Eq,
                                                     copy(&Place::Local(cur)),
                                                     copy(length_or_end)))
            ],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::if_(tcx, move_(can_go), succ, drop_block)
            })
        };
        let loop_block = self.elaborator.patch().new_block(loop_block);

        self.elaborator.patch().patch_terminator(drop_block, TerminatorKind::Drop {
            location: ptr.clone().deref(),
            target: loop_block,
            unwind: unwind.into_option()
        });

        loop_block
    }

    fn open_drop_for_array(&mut self, ety: Ty<'tcx>, opt_size: Option<u64>) -> BasicBlock {
        debug!("open_drop_for_array({:?}, {:?})", ety, opt_size);

        // if size_of::<ety>() == 0 {
        //     index_based_loop
        // } else {
        //     ptr_based_loop
        // }
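        //
        // The index-based loop is needed for zero-sized element types:
        // offsetting a pointer by a zero-sized element does not advance
        // it, so the pointer-equality exit check of the pointer-based
        // loop would never fire.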

        if let Some(size) = opt_size {
            assert!(size <= (u32::MAX as u64),
                    "move out check isn't implemented for arrays bigger than u32");
            let size = size as u32;
            let fields: Vec<(Place<'tcx>, Option<D::Path>)> = (0..size).map(|i| {
                (self.place.clone().elem(ProjectionElem::ConstantIndex {
                     offset: i,
                     min_length: size,
                     from_end: false
                 }),
                 self.elaborator.array_subpath(self.path, i, size))
            }).collect();

            if fields.iter().any(|(_, path)| path.is_some()) {
                let (succ, unwind) = self.drop_ladder_bottom();
                return self.drop_ladder(fields, succ, unwind).0
            }
        }

        let move_ = |place: &Place<'tcx>| Operand::Move(place.clone());
        let tcx = self.tcx();
        let size = &Place::Local(self.new_temp(tcx.types.usize));
        let size_is_zero = &Place::Local(self.new_temp(tcx.types.bool));
        let base_block = BasicBlockData {
            statements: vec![
                self.assign(size, Rvalue::NullaryOp(NullOp::SizeOf, ety)),
                self.assign(size_is_zero, Rvalue::BinaryOp(BinOp::Eq,
                                                           move_(size),
                                                           self.constant_usize(0)))
            ],
            is_cleanup: self.unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::if_(
                    tcx,
                    move_(size_is_zero),
                    self.drop_loop_pair(ety, false),
                    self.drop_loop_pair(ety, true)
                )
            })
        };
        self.elaborator.patch().new_block(base_block)
    }

    // Creates a pair of drop-loops of `place`, which drops its contents,
    // even in the case of one panic. If `ptr_based`, creates a pointer
    // loop, otherwise creates an index loop.
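    //
    // Schematically, the non-unwind loop is entered through (a sketch;
    // block names are illustrative, the loop itself is built by
    // `drop_loop`):
    //
    //    flag-test:
    //        if FLAG(self.path) then reset-block else self.succ
    //    reset-block:
    //        clear drop flags (Deep); goto init-block
    //    init-block:
    //        length = Len(self.place)
    //        cur = 0          // or, if ptr_based: cur = &mut self.place
    //                         // as *mut ety; length_or_end = cur + length
    //        goto loop-block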
    fn drop_loop_pair(&mut self, ety: Ty<'tcx>, ptr_based: bool) -> BasicBlock {
        debug!("drop_loop_pair({:?}, {:?})", ety, ptr_based);
        let tcx = self.tcx();
        let iter_ty = if ptr_based {
            tcx.mk_mut_ptr(ety)
        } else {
            tcx.types.usize
        };

        let cur = self.new_temp(iter_ty);
        let length = Place::Local(self.new_temp(tcx.types.usize));
        let length_or_end = if ptr_based {
            Place::Local(self.new_temp(iter_ty))
        } else {
            length.clone()
        };

        let unwind = self.unwind.map(|unwind| {
            self.drop_loop(unwind,
                           cur,
                           &length_or_end,
                           ety,
                           Unwind::InCleanup,
                           ptr_based)
        });

        let succ = self.succ; // FIXME(#43234)
        let loop_block = self.drop_loop(
            succ,
            cur,
            &length_or_end,
            ety,
            unwind,
            ptr_based);

        let cur = Place::Local(cur);
        let zero = self.constant_usize(0);
        let mut drop_block_stmts = vec![];
        drop_block_stmts.push(self.assign(&length, Rvalue::Len(self.place.clone())));
        if ptr_based {
            let tmp_ty = tcx.mk_mut_ptr(self.place_ty(self.place));
            let tmp = Place::Local(self.new_temp(tmp_ty));
            // tmp = &P;
            // cur = tmp as *mut T;
            // end = Offset(cur, len);
            drop_block_stmts.push(self.assign(&tmp, Rvalue::Ref(
                tcx.types.re_erased,
                BorrowKind::Mut { allow_two_phase_borrow: false },
                self.place.clone()
            )));
            drop_block_stmts.push(self.assign(&cur, Rvalue::Cast(
                CastKind::Misc, Operand::Move(tmp.clone()), iter_ty
            )));
            drop_block_stmts.push(self.assign(&length_or_end,
                Rvalue::BinaryOp(BinOp::Offset,
                                 Operand::Copy(cur.clone()), Operand::Move(length.clone())
            )));
        } else {
            // index = 0 (length already pushed)
            drop_block_stmts.push(self.assign(&cur, Rvalue::Use(zero)));
        }
        let drop_block = self.elaborator.patch().new_block(BasicBlockData {
            statements: drop_block_stmts,
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::Goto { target: loop_block }
            })
        });

        // FIXME(#34708): handle partially-dropped array/slice elements.
        let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
        self.drop_flag_test_block(reset_block, succ, unwind)
    }

    /// The slow-path - create an "open", elaborated drop for a type
    /// which is moved-out-of only partially, and patch `bb` to a jump
    /// to it. This must not be called on ADTs with a destructor,
    /// as these can't be moved-out-of, except for `Box<T>`, which is
    /// special-cased.
    ///
    /// This creates a "drop ladder" that drops the needed fields of the
    /// ADT, both in the success case and if one of the destructors panics.
    fn open_drop<'a>(&mut self) -> BasicBlock {
        let ty = self.place_ty(self.place);
        match ty.sty {
            ty::TyClosure(def_id, substs) => {
                let tys: Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
                self.open_drop_for_tuple(&tys)
            }
            // Note that `elaborate_drops` only drops the upvars of a generator,
            // and this is ok because `open_drop` here can only be reached
            // within that generator's own resume function.
            // This should only happen for the self argument on the resume function.
            // It effectively only contains upvars until the generator transformation runs.
            // See librustc_mir/transform/generator.rs for more details.
            ty::TyGenerator(def_id, substs, _) => {
                let tys: Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
                self.open_drop_for_tuple(&tys)
            }
            ty::TyTuple(tys) => {
                self.open_drop_for_tuple(tys)
            }
            ty::TyAdt(def, substs) => {
                if def.is_box() {
                    self.open_drop_for_box(def, substs)
                } else {
                    self.open_drop_for_adt(def, substs)
                }
            }
            ty::TyDynamic(..) => {
                let unwind = self.unwind; // FIXME(#43234)
                let succ = self.succ;
                self.complete_drop(Some(DropFlagMode::Deep), succ, unwind)
            }
            ty::TyArray(ety, size) => {
                let size = size.assert_usize(self.tcx());
                self.open_drop_for_array(ety, size)
            },
            ty::TySlice(ety) => self.open_drop_for_array(ety, None),

            _ => bug!("open drop from non-ADT `{:?}`", ty)
        }
    }

    /// Returns a basic block that drops a place using the context
    /// and path in `c`. If `mode` is something, also clear `c`
    /// according to it.
    ///
    /// if FLAG(self.path)
    ///     if let Some(mode) = mode: FLAG(self.path)[mode] = false
    ///     drop(self.place)
    fn complete_drop<'a>(&mut self,
                         drop_mode: Option<DropFlagMode>,
                         succ: BasicBlock,
                         unwind: Unwind) -> BasicBlock
    {
        debug!("complete_drop({:?},{:?})", self, drop_mode);

        let drop_block = self.drop_block(succ, unwind);
        let drop_block = if let Some(mode) = drop_mode {
            self.drop_flag_reset_block(mode, drop_block, unwind)
        } else {
            drop_block
        };

        self.drop_flag_test_block(drop_block, succ, unwind)
    }

    fn drop_flag_reset_block(&mut self,
                             mode: DropFlagMode,
                             succ: BasicBlock,
                             unwind: Unwind) -> BasicBlock
    {
        debug!("drop_flag_reset_block({:?},{:?})", self, mode);

        let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
        let block_start = Location { block: block, statement_index: 0 };
        self.elaborator.clear_drop_flag(block_start, self.path, mode);
        block
    }

    fn elaborated_drop_block<'a>(&mut self) -> BasicBlock {
        debug!("elaborated_drop_block({:?})", self);
        let unwind = self.unwind; // FIXME(#43234)
        let succ = self.succ;
        let blk = self.drop_block(succ, unwind);
        self.elaborate_drop(blk);
        blk
    }

    fn box_free_block<'a>(
        &mut self,
        adt: &'tcx ty::AdtDef,
        substs: &'tcx Substs<'tcx>,
        target: BasicBlock,
        unwind: Unwind,
    ) -> BasicBlock {
        let block = self.unelaborated_free_block(adt, substs, target, unwind);
        self.drop_flag_test_block(block, target, unwind)
    }

    fn unelaborated_free_block<'a>(
        &mut self,
        adt: &'tcx ty::AdtDef,
        substs: &'tcx Substs<'tcx>,
        target: BasicBlock,
        unwind: Unwind
    ) -> BasicBlock {
        let tcx = self.tcx();
        let unit_temp = Place::Local(self.new_temp(tcx.mk_nil()));
        let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
        let args = adt.variants[0].fields.iter().enumerate().map(|(i, f)| {
            let field = Field::new(i);
            let field_ty = f.ty(self.tcx(), substs);
            Operand::Move(self.place.clone().field(field, field_ty))
        }).collect();

        let call = TerminatorKind::Call {
            func: Operand::function_handle(tcx, free_func, substs, self.source_info.span),
            args: args,
            destination: Some((unit_temp, target)),
            cleanup: None
        }; // FIXME(#43234)
        let free_block = self.new_block(unwind, call);

        let block_start = Location { block: free_block, statement_index: 0 };
        self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
        free_block
    }

    fn drop_block<'a>(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
        let block = TerminatorKind::Drop {
            location: self.place.clone(),
            target,
            unwind: unwind.into_option()
        };
        self.new_block(unwind, block)
    }

    fn drop_flag_test_block(&mut self,
                            on_set: BasicBlock,
                            on_unset: BasicBlock,
                            unwind: Unwind)
                            -> BasicBlock
    {
        let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
        debug!("drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
               self, on_set, on_unset, unwind, style);

        match style {
            DropStyle::Dead => on_unset,
            DropStyle::Static => on_set,
            DropStyle::Conditional | DropStyle::Open => {
                let flag = self.elaborator.get_drop_flag(self.path).unwrap();
                let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset);
                self.new_block(unwind, term)
            }
        }
    }

    fn new_block<'a>(&mut self,
                     unwind: Unwind,
                     k: TerminatorKind<'tcx>)
                     -> BasicBlock
    {
        self.elaborator.patch().new_block(BasicBlockData {
            statements: vec![],
            terminator: Some(Terminator {
                source_info: self.source_info, kind: k
            }),
            is_cleanup: unwind.is_cleanup()
        })
    }

    fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
        self.elaborator.patch().new_temp(ty, self.source_info.span)
    }

    fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
        let mir = self.elaborator.mir();
        self.elaborator.patch().terminator_loc(mir, bb)
    }

    fn constant_usize(&self, val: u16) -> Operand<'tcx> {
        Operand::Constant(box Constant {
            span: self.source_info.span,
            ty: self.tcx().types.usize,
            literal: Literal::Value {
                value: ty::Const::from_usize(self.tcx(), val.into())
            }
        })
    }

    fn assign(&self, lhs: &Place<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
        Statement {
            source_info: self.source_info,
            kind: StatementKind::Assign(lhs.clone(), rhs)
        }
    }
}
975 }