// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::fmt;
use rustc::hir;
use rustc::mir::*;
use rustc::middle::const_val::{ConstInt, ConstVal};
use rustc::middle::lang_items;
use rustc::ty::{self, Ty};
use rustc::ty::subst::{Kind, Substs};
use rustc::ty::util::IntTypeExt;
use rustc_data_structures::indexed_vec::Idx;
use util::patch::MirPatch;

use std::iter;

#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum DropFlagState {
    Present, // i.e. initialized
    Absent, // i.e. deinitialized or "moved"
}

impl DropFlagState {
    pub fn value(self) -> bool {
        match self {
            DropFlagState::Present => true,
            DropFlagState::Absent => false
        }
    }
}

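/// How a drop of a particular path should be lowered, as reported by the
/// `DropElaborator`: `Dead` paths are statically known to be uninitialized
/// (the drop becomes a plain goto), `Static` paths are statically known to
/// be fully initialized (an unconditional drop suffices), `Conditional`
/// drops are guarded by the path's drop flag at runtime, and `Open` drops
/// must be expanded field-by-field because the value may be only partially
/// initialized.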
#[derive(Debug)]
pub enum DropStyle {
    Dead,
    Static,
    Conditional,
    Open,
}

#[derive(Debug)]
pub enum DropFlagMode {
    Shallow,
    Deep
}

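/// Where control goes if a drop panics: either to an explicit cleanup
/// block (`To`), or nowhere further, because the drop is already being
/// built inside a cleanup block (`InCleanup`).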
#[derive(Copy, Clone, Debug)]
pub enum Unwind {
    To(BasicBlock),
    InCleanup
}

impl Unwind {
    fn is_cleanup(self) -> bool {
        match self {
            Unwind::To(..) => false,
            Unwind::InCleanup => true
        }
    }

    fn into_option(self) -> Option<BasicBlock> {
        match self {
            Unwind::To(bb) => Some(bb),
            Unwind::InCleanup => None,
        }
    }

    fn map<F>(self, f: F) -> Self where F: FnOnce(BasicBlock) -> BasicBlock {
        match self {
            Unwind::To(bb) => Unwind::To(f(bb)),
            Unwind::InCleanup => Unwind::InCleanup
        }
    }
}

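/// The callback interface that drop elaboration uses to query its caller:
/// how a given move path should be dropped (`drop_style`), how to read and
/// clear its drop flag, and how to descend from a path into its fields,
/// dereference, or enum variants.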
pub trait DropElaborator<'a, 'tcx: 'a> : fmt::Debug {
    type Path : Copy + fmt::Debug;

    fn patch(&mut self) -> &mut MirPatch<'tcx>;
    fn mir(&self) -> &'a Mir<'tcx>;
    fn tcx(&self) -> ty::TyCtxt<'a, 'tcx, 'tcx>;
    fn param_env(&self) -> ty::ParamEnv<'tcx>;

    fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;
    fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;
    fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);


    fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path>;
    fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;
    fn downcast_subpath(&self, path: Self::Path, variant: usize) -> Option<Self::Path>;
}

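/// The state carried while elaborating one drop: the elaborator callback,
/// the source span, the lvalue being dropped, its move path, the block to
/// continue to on success, and the unwind action on panic.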
#[derive(Debug)]
struct DropCtxt<'l, 'b: 'l, 'tcx: 'b, D>
    where D : DropElaborator<'b, 'tcx> + 'l
{
    elaborator: &'l mut D,

    source_info: SourceInfo,

    lvalue: &'l Lvalue<'tcx>,
    path: D::Path,
    succ: BasicBlock,
    unwind: Unwind,
}

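/// Entry point: elaborate the drop terminator of `bb` in place, consulting
/// `elaborator` for drop styles, drop flags and move-path information.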
pub fn elaborate_drop<'b, 'tcx, D>(
    elaborator: &mut D,
    source_info: SourceInfo,
    lvalue: &Lvalue<'tcx>,
    path: D::Path,
    succ: BasicBlock,
    unwind: Unwind,
    bb: BasicBlock)
    where D: DropElaborator<'b, 'tcx>
{
    DropCtxt {
        elaborator, source_info, lvalue, path, succ, unwind
    }.elaborate_drop(bb)
}

impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
    where D: DropElaborator<'b, 'tcx>
{
    fn lvalue_ty(&self, lvalue: &Lvalue<'tcx>) -> Ty<'tcx> {
        lvalue.ty(self.elaborator.mir(), self.tcx()).to_ty(self.tcx())
    }

    fn tcx(&self) -> ty::TyCtxt<'b, 'tcx, 'tcx> {
        self.elaborator.tcx()
    }

    /// This elaborates a single drop instruction, located at `bb`, and
    /// patches over it.
    ///
    /// The elaborated drop checks the drop flags to only drop what
    /// is initialized.
    ///
    /// In addition, the relevant drop flags also need to be cleared
    /// to avoid double-drops. However, in the middle of a complex
    /// drop, one must avoid clearing some of the flags before they
    /// are read, as that would cause a memory leak.
    ///
    /// In particular, when dropping an ADT, multiple fields may be
    /// joined together under the `rest` subpath. They are all controlled
    /// by the primary drop flag, but only the last rest-field dropped
    /// should clear it (and it must also not clear anything else).
    ///
    /// FIXME: I think we should just control the flags externally
    /// and then we do not need this machinery.
    pub fn elaborate_drop<'a>(&mut self, bb: BasicBlock) {
        debug!("elaborate_drop({:?})", self);
        let style = self.elaborator.drop_style(self.path, DropFlagMode::Deep);
        debug!("elaborate_drop({:?}): live - {:?}", self, style);
        match style {
            DropStyle::Dead => {
                self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
                    target: self.succ
                });
            }
            DropStyle::Static => {
                let loc = self.terminator_loc(bb);
                self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep);
                self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop {
                    location: self.lvalue.clone(),
                    target: self.succ,
                    unwind: self.unwind.into_option(),
                });
            }
            DropStyle::Conditional => {
                let unwind = self.unwind; // FIXME(#6393)
                let succ = self.succ;
                let drop_bb = self.complete_drop(Some(DropFlagMode::Deep), succ, unwind);
                self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
                    target: drop_bb
                });
            }
            DropStyle::Open => {
                let drop_bb = self.open_drop();
                self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
                    target: drop_bb
                });
            }
        }
    }

    /// Return the lvalue and move path for each field of `variant`
    /// (the move path is `None` if the field is a rest field).
    fn move_paths_for_fields(&self,
                             base_lv: &Lvalue<'tcx>,
                             variant_path: D::Path,
                             variant: &'tcx ty::VariantDef,
                             substs: &'tcx Substs<'tcx>)
                             -> Vec<(Lvalue<'tcx>, Option<D::Path>)>
    {
        variant.fields.iter().enumerate().map(|(i, f)| {
            let field = Field::new(i);
            let subpath = self.elaborator.field_subpath(variant_path, field);

            let field_ty =
                self.tcx().normalize_associated_type_in_env(
                    &f.ty(self.tcx(), substs),
                    self.elaborator.param_env()
                );
            (base_lv.clone().field(field, field_ty), subpath)
        }).collect()
    }

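    /// Drop the contents of `lvalue` at the given subpath. If the subpath
    /// has a move path of its own, a fully elaborated drop is built for it;
    /// otherwise (a "rest" field) the drop is conditioned on the parent
    /// drop flag, `self.path`.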
    fn drop_subpath(&mut self,
                    lvalue: &Lvalue<'tcx>,
                    path: Option<D::Path>,
                    succ: BasicBlock,
                    unwind: Unwind)
                    -> BasicBlock
    {
        if let Some(path) = path {
            debug!("drop_subpath: for std field {:?}", lvalue);

            DropCtxt {
                elaborator: self.elaborator,
                source_info: self.source_info,
                path, lvalue, succ, unwind,
            }.elaborated_drop_block()
        } else {
            debug!("drop_subpath: for rest field {:?}", lvalue);

            DropCtxt {
                elaborator: self.elaborator,
                source_info: self.source_info,
                lvalue, succ, unwind,
                // Using `self.path` here to condition the drop on
                // our own drop flag.
                path: self.path
            }.complete_drop(None, succ, unwind)
        }
    }

    /// Create one-half of the drop ladder for a list of fields, and return
    /// the list of steps in it in reverse order, with the first step
    /// dropping 0 fields and so on.
    ///
    /// `unwind_ladder` is such a list of steps in reverse order,
    /// which is called if the matching step of the drop glue panics.
    fn drop_halfladder(&mut self,
                       unwind_ladder: &[Unwind],
                       mut succ: BasicBlock,
                       fields: &[(Lvalue<'tcx>, Option<D::Path>)])
                       -> Vec<BasicBlock>
    {
        Some(succ).into_iter().chain(
            fields.iter().rev().zip(unwind_ladder)
                .map(|(&(ref lv, path), &unwind_succ)| {
                    succ = self.drop_subpath(lv, path, succ, unwind_succ);
                    succ
                })
        ).collect()
    }

    fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
        // Clear the "master" drop flag at the end. This is needed
        // because the "master" drop protects the ADT's discriminant,
        // which is invalidated after the ADT is dropped.
        let (succ, unwind) = (self.succ, self.unwind); // FIXME(#6393)
        (
            self.drop_flag_reset_block(DropFlagMode::Shallow, succ, unwind),
            unwind.map(|unwind| {
                self.drop_flag_reset_block(DropFlagMode::Shallow, unwind, Unwind::InCleanup)
            })
        )
    }

    /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
    ///
    /// For example, with 3 fields, the drop ladder is
    ///
    /// .d0:
    ///     ELAB(drop location.0 [target=.d1, unwind=.c1])
    /// .d1:
    ///     ELAB(drop location.1 [target=.d2, unwind=.c2])
    /// .d2:
    ///     ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
    /// .c1:
    ///     ELAB(drop location.1 [target=.c2])
    /// .c2:
    ///     ELAB(drop location.2 [target=`self.unwind`])
    ///
    /// NOTE: this does not clear the master drop flag, so you need
    /// to point succ/unwind at a `drop_ladder_bottom`.
    fn drop_ladder<'a>(&mut self,
                       fields: Vec<(Lvalue<'tcx>, Option<D::Path>)>,
                       succ: BasicBlock,
                       unwind: Unwind)
                       -> (BasicBlock, Unwind)
    {
        debug!("drop_ladder({:?}, {:?})", self, fields);

        let mut fields = fields;
        fields.retain(|&(ref lvalue, _)| {
            self.lvalue_ty(lvalue).needs_drop(self.tcx(), self.elaborator.param_env())
        });

        debug!("drop_ladder - fields needing drop: {:?}", fields);

        let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
        let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind {
            let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields);
            halfladder.into_iter().map(Unwind::To).collect()
        } else {
            unwind_ladder
        };

        let normal_ladder =
            self.drop_halfladder(&unwind_ladder, succ, &fields);

        (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
    }

    fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
                               -> BasicBlock
    {
        debug!("open_drop_for_tuple({:?}, {:?})", self, tys);

        let fields = tys.iter().enumerate().map(|(i, &ty)| {
            (self.lvalue.clone().field(Field::new(i), ty),
             self.elaborator.field_subpath(self.path, Field::new(i)))
        }).collect();

        let (succ, unwind) = self.drop_ladder_bottom();
        self.drop_ladder(fields, succ, unwind).0
    }

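    /// Drop a `Box<T>`: drop the boxed contents through a deref of the box,
    /// then free the box itself via `box_free_block`, on both the success
    /// and the unwind paths.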
    fn open_drop_for_box<'a>(&mut self, ty: Ty<'tcx>) -> BasicBlock
    {
        debug!("open_drop_for_box({:?}, {:?})", self, ty);

        let interior = self.lvalue.clone().deref();
        let interior_path = self.elaborator.deref_subpath(self.path);

        let succ = self.succ; // FIXME(#6393)
        let unwind = self.unwind;
        let succ = self.box_free_block(ty, succ, unwind);
        let unwind_succ = self.unwind.map(|unwind| {
            self.box_free_block(ty, unwind, Unwind::InCleanup)
        });

        self.drop_subpath(&interior, interior_path, succ, unwind_succ)
    }

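    /// Drop an ADT: an enum with no variants becomes an `Unreachable`
    /// terminator, a union drops none of its fields, and any other ADT gets
    /// a drop ladder over the fields of its variant(s). If the ADT has a
    /// destructor, `Drop::drop` is called before the fields are dropped.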
    fn open_drop_for_adt<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
                             -> BasicBlock {
        debug!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt, substs);
        if adt.variants.len() == 0 {
            return self.elaborator.patch().new_block(BasicBlockData {
                statements: vec![],
                terminator: Some(Terminator {
                    source_info: self.source_info,
                    kind: TerminatorKind::Unreachable
                }),
                is_cleanup: self.unwind.is_cleanup()
            });
        }

        let contents_drop = if adt.is_union() {
            (self.succ, self.unwind)
        } else {
            self.open_drop_for_adt_contents(adt, substs)
        };

        if adt.has_dtor(self.tcx()) {
            self.destructor_call_block(contents_drop)
        } else {
            contents_drop.0
        }
    }

    fn open_drop_for_adt_contents(&mut self, adt: &'tcx ty::AdtDef,
                                  substs: &'tcx Substs<'tcx>)
                                  -> (BasicBlock, Unwind) {
        let (succ, unwind) = self.drop_ladder_bottom();
        if adt.variants.len() == 1 {
            let fields = self.move_paths_for_fields(
                self.lvalue,
                self.path,
                &adt.variants[0],
                substs
            );
            self.drop_ladder(fields, succ, unwind)
        } else {
            self.open_drop_for_multivariant(adt, substs, succ, unwind)
        }
    }

    fn open_drop_for_multivariant(&mut self, adt: &'tcx ty::AdtDef,
                                  substs: &'tcx Substs<'tcx>,
                                  succ: BasicBlock,
                                  unwind: Unwind)
                                  -> (BasicBlock, Unwind) {
        let mut values = Vec::with_capacity(adt.variants.len());
        let mut normal_blocks = Vec::with_capacity(adt.variants.len());
        let mut unwind_blocks = if unwind.is_cleanup() {
            None
        } else {
            Some(Vec::with_capacity(adt.variants.len()))
        };

        let mut have_otherwise = false;

        for (variant_index, discr) in adt.discriminants(self.tcx()).enumerate() {
            let subpath = self.elaborator.downcast_subpath(
                self.path, variant_index);
            if let Some(variant_path) = subpath {
                let base_lv = self.lvalue.clone().elem(
                    ProjectionElem::Downcast(adt, variant_index)
                );
                let fields = self.move_paths_for_fields(
                    &base_lv,
                    variant_path,
                    &adt.variants[variant_index],
                    substs);
                values.push(discr);
                if let Unwind::To(unwind) = unwind {
                    // We can't use the half-ladder from the original
                    // drop ladder, because this breaks the
                    // "funclet can't have 2 successor funclets"
                    // requirement from MSVC:
                    //
                    //          switch              unwind-switch
                    //         /      \            /             \
                    //        v1.0    v2.0     v2.0-unwind    v1.0-unwind
                    //         |       |          /                |
                    //  v1.1-unwind  v2.1-unwind                   |
                    //        ^                                    |
                    //         \-----------------------------------/
                    //
                    // Create a duplicate half-ladder to avoid that. We
                    // could technically only do this on MSVC, but I
                    // want to minimize the divergence between MSVC
                    // and non-MSVC.

                    let unwind_blocks = unwind_blocks.as_mut().unwrap();
                    let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
                    let halfladder =
                        self.drop_halfladder(&unwind_ladder, unwind, &fields);
                    unwind_blocks.push(halfladder.last().cloned().unwrap());
                }
                let (normal, _) = self.drop_ladder(fields, succ, unwind);
                normal_blocks.push(normal);
            } else {
                have_otherwise = true;
            }
        }

        if have_otherwise {
            normal_blocks.push(self.drop_block(succ, unwind));
            if let Unwind::To(unwind) = unwind {
                unwind_blocks.as_mut().unwrap().push(
                    self.drop_block(unwind, Unwind::InCleanup)
                );
            }
        } else {
            values.pop();
        }

        (self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
         unwind.map(|unwind| {
             self.adt_switch_block(
                 adt, unwind_blocks.unwrap(), &values, unwind, Unwind::InCleanup
             )
         }))
    }

    fn adt_switch_block(&mut self,
                        adt: &'tcx ty::AdtDef,
                        blocks: Vec<BasicBlock>,
                        values: &[ConstInt],
                        succ: BasicBlock,
                        unwind: Unwind)
                        -> BasicBlock {
        // If there are multiple variants, then if something
        // is present within the enum, the discriminant, tracked
        // by the rest path, must be initialized.
        //
        // Additionally, we do not want to switch on the
        // discriminant after it is freed, because that
        // way lies only trouble.
        let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
        let discr = Lvalue::Local(self.new_temp(discr_ty));
        let discr_rv = Rvalue::Discriminant(self.lvalue.clone());
        let switch_block = BasicBlockData {
            statements: vec![self.assign(&discr, discr_rv)],
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::SwitchInt {
                    discr: Operand::Consume(discr),
                    switch_ty: discr_ty,
                    values: From::from(values.to_owned()),
                    targets: blocks,
                }
            }),
            is_cleanup: unwind.is_cleanup(),
        };
        let switch_block = self.elaborator.patch().new_block(switch_block);
        self.drop_flag_test_block(switch_block, succ, unwind)
    }

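    /// Build the block that calls the ADT's `Drop::drop` impl: take a
    /// mutable reference to the value and call the destructor, continuing
    /// to `succ` (the field drops) on return and to `unwind` on panic.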
    fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Unwind))
                                 -> BasicBlock
    {
        debug!("destructor_call_block({:?}, {:?})", self, succ);
        let tcx = self.tcx();
        let drop_trait = tcx.lang_items.drop_trait().unwrap();
        let drop_fn = tcx.associated_items(drop_trait).next().unwrap();
        let ty = self.lvalue_ty(self.lvalue);
        let substs = tcx.mk_substs(iter::once(Kind::from(ty)));

        let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
            ty,
            mutbl: hir::Mutability::MutMutable
        });
        let ref_lvalue = self.new_temp(ref_ty);
        let unit_temp = Lvalue::Local(self.new_temp(tcx.mk_nil()));

        let result = BasicBlockData {
            statements: vec![self.assign(
                &Lvalue::Local(ref_lvalue),
                Rvalue::Ref(tcx.types.re_erased, BorrowKind::Mut, self.lvalue.clone())
            )],
            terminator: Some(Terminator {
                kind: TerminatorKind::Call {
                    func: Operand::function_handle(tcx, drop_fn.def_id, substs,
                                                   self.source_info.span),
                    args: vec![Operand::Consume(Lvalue::Local(ref_lvalue))],
                    destination: Some((unit_temp, succ)),
                    cleanup: unwind.into_option(),
                },
                source_info: self.source_info
            }),
            is_cleanup: unwind.is_cleanup(),
        };
        self.elaborator.patch().new_block(result)
    }

    /// create a loop that drops an array:
    ///
    /// loop-block:
    ///    can_go = cur == length_or_end
    ///    if can_go then succ else drop-block
    /// drop-block:
    ///    if ptr_based {
    ///        ptr = cur
    ///        cur = cur.offset(1)
    ///    } else {
    ///        ptr = &mut LV[cur]
    ///        cur = cur + 1
    ///    }
    ///    drop(ptr)
    fn drop_loop(&mut self,
                 succ: BasicBlock,
                 cur: &Lvalue<'tcx>,
                 length_or_end: &Lvalue<'tcx>,
                 ety: Ty<'tcx>,
                 unwind: Unwind,
                 ptr_based: bool)
                 -> BasicBlock
    {
        let use_ = |lv: &Lvalue<'tcx>| Operand::Consume(lv.clone());
        let tcx = self.tcx();

        let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
            ty: ety,
            mutbl: hir::Mutability::MutMutable
        });
        let ptr = &Lvalue::Local(self.new_temp(ref_ty));
        let can_go = &Lvalue::Local(self.new_temp(tcx.types.bool));

        let one = self.constant_usize(1);
        let (ptr_next, cur_next) = if ptr_based {
            (Rvalue::Use(use_(cur)),
             Rvalue::BinaryOp(BinOp::Offset, use_(cur), one))
        } else {
            (Rvalue::Ref(
                 tcx.types.re_erased,
                 BorrowKind::Mut,
                 self.lvalue.clone().index(use_(cur))),
             Rvalue::BinaryOp(BinOp::Add, use_(cur), one))
        };

        let drop_block = BasicBlockData {
            statements: vec![
                self.assign(ptr, ptr_next),
                self.assign(cur, cur_next)
            ],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                // this gets overwritten by drop elaboration.
                kind: TerminatorKind::Unreachable,
            })
        };
        let drop_block = self.elaborator.patch().new_block(drop_block);

        let loop_block = BasicBlockData {
            statements: vec![
                self.assign(can_go, Rvalue::BinaryOp(BinOp::Eq,
                                                     use_(cur),
                                                     use_(length_or_end)))
            ],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::if_(tcx, use_(can_go), succ, drop_block)
            })
        };
        let loop_block = self.elaborator.patch().new_block(loop_block);

        self.elaborator.patch().patch_terminator(drop_block, TerminatorKind::Drop {
            location: ptr.clone().deref(),
            target: loop_block,
            unwind: unwind.into_option()
        });

        loop_block
    }

    fn open_drop_for_array(&mut self, ety: Ty<'tcx>) -> BasicBlock {
        debug!("open_drop_for_array({:?})", ety);

        // if size_of::<ety>() == 0 {
        //     index_based_loop
        // } else {
        //     ptr_based_loop
        // }

        let tcx = self.tcx();

        let use_ = |lv: &Lvalue<'tcx>| Operand::Consume(lv.clone());
        let size = &Lvalue::Local(self.new_temp(tcx.types.usize));
        let size_is_zero = &Lvalue::Local(self.new_temp(tcx.types.bool));
        let base_block = BasicBlockData {
            statements: vec![
                self.assign(size, Rvalue::NullaryOp(NullOp::SizeOf, ety)),
                self.assign(size_is_zero, Rvalue::BinaryOp(BinOp::Eq,
                                                           use_(size),
                                                           self.constant_usize(0)))
            ],
            is_cleanup: self.unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::if_(
                    tcx,
                    use_(size_is_zero),
                    self.drop_loop_pair(ety, false),
                    self.drop_loop_pair(ety, true)
                )
            })
        };
        self.elaborator.patch().new_block(base_block)
    }

    // create a pair of drop-loops of `lvalue`, which drops its contents
    // even in the case of one panic. If `ptr_based`, create a pointer loop,
    // otherwise create an index loop.
    fn drop_loop_pair(&mut self, ety: Ty<'tcx>, ptr_based: bool) -> BasicBlock {
        debug!("drop_loop_pair({:?}, {:?})", ety, ptr_based);
        let tcx = self.tcx();
        let iter_ty = if ptr_based {
            tcx.mk_mut_ptr(ety)
        } else {
            tcx.types.usize
        };

        let cur = Lvalue::Local(self.new_temp(iter_ty));
        let length = Lvalue::Local(self.new_temp(tcx.types.usize));
        let length_or_end = if ptr_based {
            Lvalue::Local(self.new_temp(iter_ty))
        } else {
            length.clone()
        };

        let unwind = self.unwind.map(|unwind| {
            self.drop_loop(unwind,
                           &cur,
                           &length_or_end,
                           ety,
                           Unwind::InCleanup,
                           ptr_based)
        });

        let succ = self.succ; // FIXME(#6393)
        let loop_block = self.drop_loop(
            succ,
            &cur,
            &length_or_end,
            ety,
            unwind,
            ptr_based);

        let zero = self.constant_usize(0);
        let mut drop_block_stmts = vec![];
        drop_block_stmts.push(self.assign(&length, Rvalue::Len(self.lvalue.clone())));
        if ptr_based {
            let tmp_ty = tcx.mk_mut_ptr(self.lvalue_ty(self.lvalue));
            let tmp = Lvalue::Local(self.new_temp(tmp_ty));
            // tmp = &LV;
            // cur = tmp as *mut T;
            // end = Offset(cur, len);
            drop_block_stmts.push(self.assign(&tmp, Rvalue::Ref(
                tcx.types.re_erased, BorrowKind::Mut, self.lvalue.clone()
            )));
            drop_block_stmts.push(self.assign(&cur, Rvalue::Cast(
                CastKind::Misc, Operand::Consume(tmp.clone()), iter_ty
            )));
            drop_block_stmts.push(self.assign(&length_or_end,
                Rvalue::BinaryOp(BinOp::Offset,
                    Operand::Consume(cur.clone()), Operand::Consume(length.clone())
            )));
        } else {
            // index = 0 (length already pushed)
            drop_block_stmts.push(self.assign(&cur, Rvalue::Use(zero)));
        }
        let drop_block = self.elaborator.patch().new_block(BasicBlockData {
            statements: drop_block_stmts,
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::Goto { target: loop_block }
            })
        });

        // FIXME(#34708): handle partially-dropped array/slice elements.
        let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
        self.drop_flag_test_block(reset_block, succ, unwind)
    }

    /// The slow-path - create an "open", elaborated drop for a type
    /// which is moved-out-of only partially, and patch `bb` to a jump
    /// to it. This must not be called on ADTs with a destructor,
    /// as these can't be moved-out-of, except for `Box<T>`, which is
    /// special-cased.
    ///
    /// This creates a "drop ladder" that drops the needed fields of the
    /// ADT, both in the success case and if one of the destructors fails.
    fn open_drop<'a>(&mut self) -> BasicBlock {
        let ty = self.lvalue_ty(self.lvalue);
        match ty.sty {
            ty::TyClosure(def_id, substs) => {
                let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
                self.open_drop_for_tuple(&tys)
            }
            ty::TyTuple(tys, _) => {
                self.open_drop_for_tuple(tys)
            }
            ty::TyAdt(def, _) if def.is_box() => {
                self.open_drop_for_box(ty.boxed_ty())
            }
            ty::TyAdt(def, substs) => {
                self.open_drop_for_adt(def, substs)
            }
            ty::TyDynamic(..) => {
                let unwind = self.unwind; // FIXME(#6393)
                let succ = self.succ;
                self.complete_drop(Some(DropFlagMode::Deep), succ, unwind)
            }
            ty::TyArray(ety, _) | ty::TySlice(ety) => {
                self.open_drop_for_array(ety)
            }
            _ => bug!("open drop from non-ADT `{:?}`", ty)
        }
    }

    /// Return a basic block that drops an lvalue using the context
    /// and path in `c`. If `mode` is something, also clear `c`
    /// according to it.
    ///
    /// if FLAG(self.path)
    ///     if let Some(mode) = mode: FLAG(self.path)[mode] = false
    ///     drop(self.lv)
    fn complete_drop<'a>(&mut self,
                         drop_mode: Option<DropFlagMode>,
                         succ: BasicBlock,
                         unwind: Unwind) -> BasicBlock
    {
        debug!("complete_drop({:?},{:?})", self, drop_mode);

        let drop_block = self.drop_block(succ, unwind);
        let drop_block = if let Some(mode) = drop_mode {
            self.drop_flag_reset_block(mode, drop_block, unwind)
        } else {
            drop_block
        };

        self.drop_flag_test_block(drop_block, succ, unwind)
    }

    fn drop_flag_reset_block(&mut self,
                             mode: DropFlagMode,
                             succ: BasicBlock,
                             unwind: Unwind) -> BasicBlock
    {
        debug!("drop_flag_reset_block({:?},{:?})", self, mode);

        let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
        let block_start = Location { block: block, statement_index: 0 };
        self.elaborator.clear_drop_flag(block_start, self.path, mode);
        block
    }

    fn elaborated_drop_block<'a>(&mut self) -> BasicBlock {
        debug!("elaborated_drop_block({:?})", self);
        let unwind = self.unwind; // FIXME(#6393)
        let succ = self.succ;
        let blk = self.drop_block(succ, unwind);
        self.elaborate_drop(blk);
        blk
    }

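    /// Build the block that frees a `Box` by calling the `box_free` lang
    /// item, guarded by this path's drop flag; the flag is cleared
    /// (shallowly) when the free block is entered.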
    fn box_free_block<'a>(
        &mut self,
        ty: Ty<'tcx>,
        target: BasicBlock,
        unwind: Unwind,
    ) -> BasicBlock {
        let block = self.unelaborated_free_block(ty, target, unwind);
        self.drop_flag_test_block(block, target, unwind)
    }

    fn unelaborated_free_block<'a>(
        &mut self,
        ty: Ty<'tcx>,
        target: BasicBlock,
        unwind: Unwind
    ) -> BasicBlock {
        let tcx = self.tcx();
        let unit_temp = Lvalue::Local(self.new_temp(tcx.mk_nil()));
        let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
        let substs = tcx.mk_substs(iter::once(Kind::from(ty)));

        let call = TerminatorKind::Call {
            func: Operand::function_handle(tcx, free_func, substs, self.source_info.span),
            args: vec![Operand::Consume(self.lvalue.clone())],
            destination: Some((unit_temp, target)),
            cleanup: None
        }; // FIXME(#6393)
        let free_block = self.new_block(unwind, call);

        let block_start = Location { block: free_block, statement_index: 0 };
        self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
        free_block
    }

    fn drop_block<'a>(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
        let block = TerminatorKind::Drop {
            location: self.lvalue.clone(),
            target,
            unwind: unwind.into_option()
        };
        self.new_block(unwind, block)
    }

    fn drop_flag_test_block(&mut self,
                            on_set: BasicBlock,
                            on_unset: BasicBlock,
                            unwind: Unwind)
                            -> BasicBlock
    {
        let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
        debug!("drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
               self, on_set, on_unset, unwind, style);

        match style {
            DropStyle::Dead => on_unset,
            DropStyle::Static => on_set,
            DropStyle::Conditional | DropStyle::Open => {
                let flag = self.elaborator.get_drop_flag(self.path).unwrap();
                let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset);
                self.new_block(unwind, term)
            }
        }
    }

    fn new_block<'a>(&mut self,
                     unwind: Unwind,
                     k: TerminatorKind<'tcx>)
                     -> BasicBlock
    {
        self.elaborator.patch().new_block(BasicBlockData {
            statements: vec![],
            terminator: Some(Terminator {
                source_info: self.source_info, kind: k
            }),
            is_cleanup: unwind.is_cleanup()
        })
    }

    fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
        self.elaborator.patch().new_temp(ty, self.source_info.span)
    }

    fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
        let mir = self.elaborator.mir();
        self.elaborator.patch().terminator_loc(mir, bb)
    }

    fn constant_usize(&self, val: u16) -> Operand<'tcx> {
        Operand::Constant(box Constant {
            span: self.source_info.span,
            ty: self.tcx().types.usize,
            literal: Literal::Value { value: ConstVal::Integral(self.tcx().const_usize(val)) }
        })
    }

    fn assign(&self, lhs: &Lvalue<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
        Statement {
            source_info: self.source_info,
            kind: StatementKind::Assign(lhs.clone(), rhs)
        }
    }
}