]> git.proxmox.com Git - rustc.git/blame - src/librustc_borrowck/borrowck/mir/elaborate_drops.rs
New upstream version 1.17.0+dfsg2
[rustc.git] / src / librustc_borrowck / borrowck / mir / elaborate_drops.rs
CommitLineData
3157f602
XL
1// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2// file at the top-level directory of this distribution and at
3// http://rust-lang.org/COPYRIGHT.
4//
5// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8// option. This file may not be copied, modified, or distributed
9// except according to those terms.
10
32a655c1 11use super::gather_moves::{HasMoveData, MoveData, MovePathIndex, LookupResult};
3157f602
XL
12use super::dataflow::{MaybeInitializedLvals, MaybeUninitializedLvals};
13use super::dataflow::{DataflowResults};
14use super::{drop_flag_effects_for_location, on_all_children_bits};
9e0c209e 15use super::on_lookup_result_bits;
3157f602
XL
16use super::{DropFlagState, MoveDataParamEnv};
17use super::patch::MirPatch;
18use rustc::ty::{self, Ty, TyCtxt};
9e0c209e 19use rustc::ty::subst::{Kind, Subst, Substs};
8bb4bdeb 20use rustc::ty::util::IntTypeExt;
c30ab7b3 21use rustc::mir::*;
3157f602
XL
22use rustc::mir::transform::{Pass, MirPass, MirSource};
23use rustc::middle::const_val::ConstVal;
24use rustc::middle::lang_items;
476ff2be 25use rustc::util::nodemap::FxHashMap;
c30ab7b3 26use rustc_data_structures::indexed_set::IdxSetBuf;
3157f602
XL
27use rustc_data_structures::indexed_vec::Idx;
28use syntax_pos::Span;
29
30use std::fmt;
9e0c209e 31use std::iter;
3157f602
XL
32use std::u32;
33
/// MIR pass that rewrites `Drop` / `DropAndReplace` terminators into
/// drops guarded by runtime drop flags where initialization is not
/// statically known.
pub struct ElaborateDrops;
35
36impl<'tcx> MirPass<'tcx> for ElaborateDrops {
37 fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
38 src: MirSource, mir: &mut Mir<'tcx>)
39 {
40 debug!("elaborate_drops({:?} @ {:?})", src, mir.span);
41 match src {
42 MirSource::Fn(..) => {},
43 _ => return
44 }
45 let id = src.item_id();
46 let param_env = ty::ParameterEnvironment::for_item(tcx, id);
9e0c209e 47 let move_data = MoveData::gather_moves(mir, tcx, &param_env);
3157f602
XL
48 let elaborate_patch = {
49 let mir = &*mir;
50 let env = MoveDataParamEnv {
51 move_data: move_data,
52 param_env: param_env
53 };
54 let flow_inits =
32a655c1
SL
55 super::do_dataflow(tcx, mir, id, &[],
56 MaybeInitializedLvals::new(tcx, mir, &env),
57 |bd, p| &bd.move_data().move_paths[p]);
3157f602 58 let flow_uninits =
32a655c1
SL
59 super::do_dataflow(tcx, mir, id, &[],
60 MaybeUninitializedLvals::new(tcx, mir, &env),
61 |bd, p| &bd.move_data().move_paths[p]);
3157f602
XL
62
63 ElaborateDropsCtxt {
64 tcx: tcx,
65 mir: mir,
66 env: &env,
67 flow_inits: flow_inits,
68 flow_uninits: flow_uninits,
476ff2be 69 drop_flags: FxHashMap(),
3157f602
XL
70 patch: MirPatch::new(mir),
71 }.elaborate()
72 };
73 elaborate_patch.apply(mir);
74 }
75}
76
77impl Pass for ElaborateDrops {}
78
79struct InitializationData {
80 live: IdxSetBuf<MovePathIndex>,
81 dead: IdxSetBuf<MovePathIndex>
82}
83
84impl InitializationData {
85 fn apply_location<'a,'tcx>(&mut self,
86 tcx: TyCtxt<'a, 'tcx, 'tcx>,
87 mir: &Mir<'tcx>,
88 env: &MoveDataParamEnv<'tcx>,
89 loc: Location)
90 {
91 drop_flag_effects_for_location(tcx, mir, env, loc, |path, df| {
92 debug!("at location {:?}: setting {:?} to {:?}",
93 loc, path, df);
94 match df {
95 DropFlagState::Present => {
96 self.live.add(&path);
97 self.dead.remove(&path);
98 }
99 DropFlagState::Absent => {
100 self.dead.add(&path);
101 self.live.remove(&path);
102 }
103 }
104 });
105 }
106
107 fn state(&self, path: MovePathIndex) -> (bool, bool) {
108 (self.live.contains(&path), self.dead.contains(&path))
109 }
110}
111
112impl fmt::Debug for InitializationData {
113 fn fmt(&self, _f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
114 Ok(())
115 }
116}
117
118struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
119 tcx: TyCtxt<'a, 'tcx, 'tcx>,
120 mir: &'a Mir<'tcx>,
121 env: &'a MoveDataParamEnv<'tcx>,
122 flow_inits: DataflowResults<MaybeInitializedLvals<'a, 'tcx>>,
123 flow_uninits: DataflowResults<MaybeUninitializedLvals<'a, 'tcx>>,
476ff2be 124 drop_flags: FxHashMap<MovePathIndex, Local>,
3157f602
XL
125 patch: MirPatch<'tcx>,
126}
127
128#[derive(Copy, Clone, Debug)]
129struct DropCtxt<'a, 'tcx: 'a> {
130 source_info: SourceInfo,
131 is_cleanup: bool,
132
133 init_data: &'a InitializationData,
134
135 lvalue: &'a Lvalue<'tcx>,
136 path: MovePathIndex,
137 succ: BasicBlock,
138 unwind: Option<BasicBlock>
139}
140
141impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
142 fn move_data(&self) -> &'b MoveData<'tcx> { &self.env.move_data }
143 fn param_env(&self) -> &'b ty::ParameterEnvironment<'tcx> {
144 &self.env.param_env
145 }
146
147 fn initialization_data_at(&self, loc: Location) -> InitializationData {
148 let mut data = InitializationData {
149 live: self.flow_inits.sets().on_entry_set_for(loc.block.index())
150 .to_owned(),
151 dead: self.flow_uninits.sets().on_entry_set_for(loc.block.index())
152 .to_owned(),
153 };
9e0c209e 154 for stmt in 0..loc.statement_index {
3157f602 155 data.apply_location(self.tcx, self.mir, self.env,
9e0c209e 156 Location { block: loc.block, statement_index: stmt });
3157f602
XL
157 }
158 data
159 }
160
161 fn create_drop_flag(&mut self, index: MovePathIndex) {
162 let tcx = self.tcx;
163 let patch = &mut self.patch;
8bb4bdeb 164 debug!("create_drop_flag({:?})", self.mir.span);
3157f602
XL
165 self.drop_flags.entry(index).or_insert_with(|| {
166 patch.new_temp(tcx.types.bool)
167 });
168 }
169
170 fn drop_flag(&mut self, index: MovePathIndex) -> Option<Lvalue<'tcx>> {
c30ab7b3 171 self.drop_flags.get(&index).map(|t| Lvalue::Local(*t))
3157f602
XL
172 }
173
174 /// create a patch that elaborates all drops in the input
175 /// MIR.
176 fn elaborate(mut self) -> MirPatch<'tcx>
177 {
178 self.collect_drop_flags();
179
180 self.elaborate_drops();
181
182 self.drop_flags_on_init();
183 self.drop_flags_for_fn_rets();
184 self.drop_flags_for_args();
185 self.drop_flags_for_locs();
186
187 self.patch
188 }
189
190 fn path_needs_drop(&self, path: MovePathIndex) -> bool
191 {
9e0c209e
SL
192 let lvalue = &self.move_data().move_paths[path].lvalue;
193 let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
194 debug!("path_needs_drop({:?}, {:?} : {:?})", path, lvalue, ty);
3157f602 195
9e0c209e 196 self.tcx.type_needs_drop_given_env(ty, self.param_env())
3157f602
XL
197 }
198
199 fn collect_drop_flags(&mut self)
200 {
201 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
202 let terminator = data.terminator();
203 let location = match terminator.kind {
204 TerminatorKind::Drop { ref location, .. } |
205 TerminatorKind::DropAndReplace { ref location, .. } => location,
206 _ => continue
207 };
208
3157f602
XL
209 let init_data = self.initialization_data_at(Location {
210 block: bb,
9e0c209e 211 statement_index: data.statements.len()
3157f602
XL
212 });
213
214 let path = self.move_data().rev_lookup.find(location);
9e0c209e 215 debug!("collect_drop_flags: {:?}, lv {:?} ({:?})",
3157f602
XL
216 bb, location, path);
217
9e0c209e
SL
218 let path = match path {
219 LookupResult::Exact(e) => e,
220 LookupResult::Parent(None) => continue,
221 LookupResult::Parent(Some(parent)) => {
222 let (_maybe_live, maybe_dead) = init_data.state(parent);
223 if maybe_dead {
224 span_bug!(terminator.source_info.span,
225 "drop of untracked, uninitialized value {:?}, lv {:?} ({:?})",
226 bb, location, path);
227 }
228 continue
229 }
230 };
231
3157f602
XL
232 on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
233 if self.path_needs_drop(child) {
234 let (maybe_live, maybe_dead) = init_data.state(child);
235 debug!("collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
236 child, location, path, (maybe_live, maybe_dead));
237 if maybe_live && maybe_dead {
238 self.create_drop_flag(child)
239 }
240 }
241 });
242 }
243 }
244
245 fn elaborate_drops(&mut self)
246 {
247 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
9e0c209e 248 let loc = Location { block: bb, statement_index: data.statements.len() };
3157f602
XL
249 let terminator = data.terminator();
250
251 let resume_block = self.patch.resume_block();
252 match terminator.kind {
253 TerminatorKind::Drop { ref location, target, unwind } => {
254 let init_data = self.initialization_data_at(loc);
9e0c209e
SL
255 match self.move_data().rev_lookup.find(location) {
256 LookupResult::Exact(path) => {
257 self.elaborate_drop(&DropCtxt {
258 source_info: terminator.source_info,
259 is_cleanup: data.is_cleanup,
260 init_data: &init_data,
261 lvalue: location,
262 path: path,
263 succ: target,
264 unwind: if data.is_cleanup {
265 None
266 } else {
267 Some(Option::unwrap_or(unwind, resume_block))
268 }
269 }, bb);
3157f602 270 }
9e0c209e
SL
271 LookupResult::Parent(..) => {
272 span_bug!(terminator.source_info.span,
273 "drop of untracked value {:?}", bb);
274 }
275 }
3157f602
XL
276 }
277 TerminatorKind::DropAndReplace { ref location, ref value,
278 target, unwind } =>
279 {
280 assert!(!data.is_cleanup);
281
282 self.elaborate_replace(
283 loc,
284 location, value,
285 target, unwind
286 );
287 }
288 _ => continue
289 }
290 }
291 }
292
293 /// Elaborate a MIR `replace` terminator. This instruction
294 /// is not directly handled by translation, and therefore
295 /// must be desugared.
296 ///
297 /// The desugaring drops the location if needed, and then writes
298 /// the value (including setting the drop flag) over it in *both* arms.
299 ///
300 /// The `replace` terminator can also be called on lvalues that
301 /// are not tracked by elaboration (for example,
302 /// `replace x[i] <- tmp0`). The borrow checker requires that
303 /// these locations are initialized before the assignment,
304 /// so we just generate an unconditional drop.
305 fn elaborate_replace(
306 &mut self,
307 loc: Location,
308 location: &Lvalue<'tcx>,
309 value: &Operand<'tcx>,
310 target: BasicBlock,
311 unwind: Option<BasicBlock>)
312 {
313 let bb = loc.block;
314 let data = &self.mir[bb];
315 let terminator = data.terminator();
316
317 let assign = Statement {
318 kind: StatementKind::Assign(location.clone(), Rvalue::Use(value.clone())),
319 source_info: terminator.source_info
320 };
321
322 let unwind = unwind.unwrap_or(self.patch.resume_block());
323 let unwind = self.patch.new_block(BasicBlockData {
324 statements: vec![assign.clone()],
325 terminator: Some(Terminator {
326 kind: TerminatorKind::Goto { target: unwind },
327 ..*terminator
328 }),
329 is_cleanup: true
330 });
331
332 let target = self.patch.new_block(BasicBlockData {
333 statements: vec![assign],
334 terminator: Some(Terminator {
335 kind: TerminatorKind::Goto { target: target },
336 ..*terminator
337 }),
338 is_cleanup: data.is_cleanup,
339 });
340
9e0c209e
SL
341 match self.move_data().rev_lookup.find(location) {
342 LookupResult::Exact(path) => {
343 debug!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator, path);
344 let init_data = self.initialization_data_at(loc);
345
346 self.elaborate_drop(&DropCtxt {
347 source_info: terminator.source_info,
348 is_cleanup: data.is_cleanup,
349 init_data: &init_data,
350 lvalue: location,
351 path: path,
352 succ: target,
353 unwind: Some(unwind)
354 }, bb);
355 on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
356 self.set_drop_flag(Location { block: target, statement_index: 0 },
357 child, DropFlagState::Present);
358 self.set_drop_flag(Location { block: unwind, statement_index: 0 },
359 child, DropFlagState::Present);
360 });
361 }
362 LookupResult::Parent(parent) => {
363 // drop and replace behind a pointer/array/whatever. The location
364 // must be initialized.
365 debug!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator, parent);
366 self.patch.patch_terminator(bb, TerminatorKind::Drop {
367 location: location.clone(),
368 target: target,
369 unwind: Some(unwind)
370 });
371 }
3157f602
XL
372 }
373 }
374
375 /// This elaborates a single drop instruction, located at `bb`, and
376 /// patches over it.
377 ///
378 /// The elaborated drop checks the drop flags to only drop what
379 /// is initialized.
380 ///
381 /// In addition, the relevant drop flags also need to be cleared
382 /// to avoid double-drops. However, in the middle of a complex
383 /// drop, one must avoid clearing some of the flags before they
384 /// are read, as that would cause a memory leak.
385 ///
386 /// In particular, when dropping an ADT, multiple fields may be
387 /// joined together under the `rest` subpath. They are all controlled
388 /// by the primary drop flag, but only the last rest-field dropped
389 /// should clear it (and it must also not clear anything else).
390 ///
391 /// FIXME: I think we should just control the flags externally
392 /// and then we do not need this machinery.
393 fn elaborate_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, bb: BasicBlock) {
394 debug!("elaborate_drop({:?})", c);
395
396 let mut some_live = false;
397 let mut some_dead = false;
398 let mut children_count = 0;
399 on_all_children_bits(
400 self.tcx, self.mir, self.move_data(),
401 c.path, |child| {
402 if self.path_needs_drop(child) {
403 let (live, dead) = c.init_data.state(child);
404 debug!("elaborate_drop: state({:?}) = {:?}",
405 child, (live, dead));
406 some_live |= live;
407 some_dead |= dead;
408 children_count += 1;
409 }
410 });
411
412 debug!("elaborate_drop({:?}): live - {:?}", c,
413 (some_live, some_dead));
414 match (some_live, some_dead) {
415 (false, false) | (false, true) => {
416 // dead drop - patch it out
417 self.patch.patch_terminator(bb, TerminatorKind::Goto {
418 target: c.succ
419 });
420 }
421 (true, false) => {
422 // static drop - just set the flag
423 self.patch.patch_terminator(bb, TerminatorKind::Drop {
424 location: c.lvalue.clone(),
425 target: c.succ,
426 unwind: c.unwind
427 });
428 self.drop_flags_for_drop(c, bb);
429 }
430 (true, true) => {
431 // dynamic drop
432 let drop_bb = if children_count == 1 || self.must_complete_drop(c) {
433 self.conditional_drop(c)
434 } else {
435 self.open_drop(c)
436 };
437 self.patch.patch_terminator(bb, TerminatorKind::Goto {
438 target: drop_bb
439 });
440 }
441 }
442 }
443
444 /// Return the lvalue and move path for each field of `variant`,
445 /// (the move path is `None` if the field is a rest field).
446 fn move_paths_for_fields(&self,
447 base_lv: &Lvalue<'tcx>,
448 variant_path: MovePathIndex,
476ff2be 449 variant: &'tcx ty::VariantDef,
3157f602
XL
450 substs: &'tcx Substs<'tcx>)
451 -> Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>
452 {
3157f602
XL
453 variant.fields.iter().enumerate().map(|(i, f)| {
454 let subpath =
9e0c209e 455 super::move_path_children_matching(self.move_data(), variant_path, |p| {
3157f602
XL
456 match p {
457 &Projection {
458 elem: ProjectionElem::Field(idx, _), ..
459 } => idx.index() == i,
460 _ => false
461 }
462 });
463
464 let field_ty =
465 self.tcx.normalize_associated_type_in_env(
466 &f.ty(self.tcx, substs),
467 self.param_env()
468 );
469 (base_lv.clone().field(Field::new(i), field_ty), subpath)
470 }).collect()
471 }
472
473 /// Create one-half of the drop ladder for a list of fields, and return
474 /// the list of steps in it in reverse order.
475 ///
476 /// `unwind_ladder` is such a list of steps in reverse order,
477 /// which is called instead of the next step if the drop unwinds
478 /// (the first field is never reached). If it is `None`, all
479 /// unwind targets are left blank.
480 fn drop_halfladder<'a>(&mut self,
481 c: &DropCtxt<'a, 'tcx>,
482 unwind_ladder: Option<Vec<BasicBlock>>,
483 succ: BasicBlock,
484 fields: &[(Lvalue<'tcx>, Option<MovePathIndex>)],
485 is_cleanup: bool)
486 -> Vec<BasicBlock>
487 {
3157f602
XL
488 let mut unwind_succ = if is_cleanup {
489 None
490 } else {
491 c.unwind
492 };
476ff2be
SL
493
494 let mut succ = self.new_block(
495 c, c.is_cleanup, TerminatorKind::Goto { target: succ }
496 );
497
498 // Always clear the "master" drop flag at the bottom of the
499 // ladder. This is needed because the "master" drop flag
500 // protects the ADT's discriminant, which is invalidated
501 // after the ADT is dropped.
502 self.set_drop_flag(
503 Location { block: succ, statement_index: 0 },
504 c.path,
505 DropFlagState::Absent
506 );
3157f602
XL
507
508 fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
476ff2be
SL
509 succ = if let Some(path) = path {
510 debug!("drop_ladder: for std field {} ({:?})", i, lv);
511
512 self.elaborated_drop_block(&DropCtxt {
513 source_info: c.source_info,
514 is_cleanup: is_cleanup,
515 init_data: c.init_data,
516 lvalue: lv,
517 path: path,
518 succ: succ,
519 unwind: unwind_succ,
520 })
521 } else {
522 debug!("drop_ladder: for rest field {} ({:?})", i, lv);
523
524 self.complete_drop(&DropCtxt {
525 source_info: c.source_info,
526 is_cleanup: is_cleanup,
527 init_data: c.init_data,
528 lvalue: lv,
529 path: c.path,
530 succ: succ,
531 unwind: unwind_succ,
532 }, false)
3157f602
XL
533 };
534
3157f602 535 unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);
476ff2be 536 succ
3157f602
XL
537 }).collect()
538 }
539
540 /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
541 ///
542 /// For example, with 3 fields, the drop ladder is
543 ///
544 /// .d0:
545 /// ELAB(drop location.0 [target=.d1, unwind=.c1])
546 /// .d1:
547 /// ELAB(drop location.1 [target=.d2, unwind=.c2])
548 /// .d2:
549 /// ELAB(drop location.2 [target=`c.succ`, unwind=`c.unwind`])
550 /// .c1:
551 /// ELAB(drop location.1 [target=.c2])
552 /// .c2:
553 /// ELAB(drop location.2 [target=`c.unwind])
554 fn drop_ladder<'a>(&mut self,
555 c: &DropCtxt<'a, 'tcx>,
556 fields: Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>)
557 -> BasicBlock
558 {
559 debug!("drop_ladder({:?}, {:?})", c, fields);
560
561 let mut fields = fields;
562 fields.retain(|&(ref lvalue, _)| {
5bcae85e 563 let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
3157f602
XL
564 self.tcx.type_needs_drop_given_env(ty, self.param_env())
565 });
566
567 debug!("drop_ladder - fields needing drop: {:?}", fields);
568
569 let unwind_ladder = if c.is_cleanup {
570 None
571 } else {
572 Some(self.drop_halfladder(c, None, c.unwind.unwrap(), &fields, true))
573 };
574
575 self.drop_halfladder(c, unwind_ladder, c.succ, &fields, c.is_cleanup)
576 .last().cloned().unwrap_or(c.succ)
577 }
578
579 fn open_drop_for_tuple<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, tys: &[Ty<'tcx>])
580 -> BasicBlock
581 {
582 debug!("open_drop_for_tuple({:?}, {:?})", c, tys);
583
584 let fields = tys.iter().enumerate().map(|(i, &ty)| {
585 (c.lvalue.clone().field(Field::new(i), ty),
586 super::move_path_children_matching(
9e0c209e 587 self.move_data(), c.path, |proj| match proj {
3157f602
XL
588 &Projection {
589 elem: ProjectionElem::Field(f, _), ..
590 } => f.index() == i,
591 _ => false
592 }
593 ))
594 }).collect();
595
596 self.drop_ladder(c, fields)
597 }
598
599 fn open_drop_for_box<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, ty: Ty<'tcx>)
600 -> BasicBlock
601 {
602 debug!("open_drop_for_box({:?}, {:?})", c, ty);
603
604 let interior_path = super::move_path_children_matching(
9e0c209e 605 self.move_data(), c.path, |proj| match proj {
3157f602
XL
606 &Projection { elem: ProjectionElem::Deref, .. } => true,
607 _ => false
608 }).unwrap();
609
610 let interior = c.lvalue.clone().deref();
611 let inner_c = DropCtxt {
612 lvalue: &interior,
613 unwind: c.unwind.map(|u| {
614 self.box_free_block(c, ty, u, true)
615 }),
616 succ: self.box_free_block(c, ty, c.succ, c.is_cleanup),
617 path: interior_path,
618 ..*c
619 };
620
621 self.elaborated_drop_block(&inner_c)
622 }
623
3157f602 624 fn open_drop_for_adt<'a>(&mut self, c: &DropCtxt<'a, 'tcx>,
476ff2be 625 adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
3157f602
XL
626 -> BasicBlock {
627 debug!("open_drop_for_adt({:?}, {:?}, {:?})", c, adt, substs);
628
3157f602
XL
629 match adt.variants.len() {
630 1 => {
631 let fields = self.move_paths_for_fields(
632 c.lvalue,
633 c.path,
634 &adt.variants[0],
635 substs
636 );
637 self.drop_ladder(c, fields)
638 }
639 _ => {
8bb4bdeb
XL
640 let mut values = Vec::with_capacity(adt.variants.len());
641 let mut blocks = Vec::with_capacity(adt.variants.len());
642 let mut otherwise = None;
643 for (variant_index, discr) in adt.discriminants(self.tcx).enumerate() {
644 let subpath = super::move_path_children_matching(
645 self.move_data(), c.path, |proj| match proj {
646 &Projection {
647 elem: ProjectionElem::Downcast(_, idx), ..
648 } => idx == variant_index,
649 _ => false
650 });
651 if let Some(variant_path) = subpath {
652 let base_lv = c.lvalue.clone().elem(
653 ProjectionElem::Downcast(adt, variant_index)
654 );
655 let fields = self.move_paths_for_fields(
656 &base_lv,
657 variant_path,
658 &adt.variants[variant_index],
659 substs);
660 values.push(discr);
661 blocks.push(self.drop_ladder(c, fields));
662 } else {
663 // variant not found - drop the entire enum
664 if let None = otherwise {
665 otherwise = Some(self.complete_drop(c, true));
666 }
667 }
668 }
669 if let Some(block) = otherwise {
670 blocks.push(block);
671 } else {
672 values.pop();
673 }
3157f602
XL
674 // If there are multiple variants, then if something
675 // is present within the enum the discriminant, tracked
676 // by the rest path, must be initialized.
677 //
678 // Additionally, we do not want to switch on the
679 // discriminant after it is free-ed, because that
680 // way lies only trouble.
8bb4bdeb
XL
681 let discr_ty = adt.repr.discr_type().to_ty(self.tcx);
682 let discr = Lvalue::Local(self.patch.new_temp(discr_ty));
683 let switch_block = self.patch.new_block(BasicBlockData {
684 statements: vec![
685 Statement {
686 source_info: c.source_info,
687 kind: StatementKind::Assign(discr.clone(),
688 Rvalue::Discriminant(c.lvalue.clone()))
689 }
690 ],
691 terminator: Some(Terminator {
692 source_info: c.source_info,
693 kind: TerminatorKind::SwitchInt {
694 discr: Operand::Consume(discr),
695 switch_ty: discr_ty,
696 values: From::from(values),
697 targets: blocks,
698 }
699 }),
700 is_cleanup: c.is_cleanup,
701 });
3157f602
XL
702 self.drop_flag_test_block(c, switch_block)
703 }
704 }
705 }
706
707 /// The slow-path - create an "open", elaborated drop for a type
708 /// which is moved-out-of only partially, and patch `bb` to a jump
709 /// to it. This must not be called on ADTs with a destructor,
710 /// as these can't be moved-out-of, except for `Box<T>`, which is
711 /// special-cased.
712 ///
713 /// This creates a "drop ladder" that drops the needed fields of the
714 /// ADT, both in the success case or if one of the destructors fail.
715 fn open_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
5bcae85e 716 let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
3157f602 717 match ty.sty {
476ff2be
SL
718 ty::TyClosure(def_id, substs) => {
719 let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx).collect();
720 self.open_drop_for_tuple(c, &tys)
721 }
8bb4bdeb 722 ty::TyTuple(tys, _) => {
3157f602
XL
723 self.open_drop_for_tuple(c, tys)
724 }
32a655c1
SL
725 ty::TyAdt(def, _) if def.is_box() => {
726 self.open_drop_for_box(c, ty.boxed_ty())
727 }
728 ty::TyAdt(def, substs) => {
729 self.open_drop_for_adt(c, def, substs)
3157f602
XL
730 }
731 _ => bug!("open drop from non-ADT `{:?}`", ty)
732 }
733 }
734
735 /// Return a basic block that drop an lvalue using the context
736 /// and path in `c`. If `update_drop_flag` is true, also
737 /// clear `c`.
738 ///
739 /// if FLAG(c.path)
740 /// if(update_drop_flag) FLAG(c.path) = false
741 /// drop(c.lv)
742 fn complete_drop<'a>(
743 &mut self,
744 c: &DropCtxt<'a, 'tcx>,
745 update_drop_flag: bool)
746 -> BasicBlock
747 {
748 debug!("complete_drop({:?},{:?})", c, update_drop_flag);
749
750 let drop_block = self.drop_block(c);
751 if update_drop_flag {
752 self.set_drop_flag(
9e0c209e 753 Location { block: drop_block, statement_index: 0 },
3157f602
XL
754 c.path,
755 DropFlagState::Absent
756 );
757 }
758
759 self.drop_flag_test_block(c, drop_block)
760 }
761
762 /// Create a simple conditional drop.
763 ///
764 /// if FLAG(c.lv)
765 /// FLAGS(c.lv) = false
766 /// drop(c.lv)
767 fn conditional_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>)
768 -> BasicBlock
769 {
770 debug!("conditional_drop({:?})", c);
771 let drop_bb = self.drop_block(c);
772 self.drop_flags_for_drop(c, drop_bb);
773
774 self.drop_flag_test_block(c, drop_bb)
775 }
776
777 fn new_block<'a>(&mut self,
778 c: &DropCtxt<'a, 'tcx>,
779 is_cleanup: bool,
780 k: TerminatorKind<'tcx>)
781 -> BasicBlock
782 {
783 self.patch.new_block(BasicBlockData {
784 statements: vec![],
785 terminator: Some(Terminator {
786 source_info: c.source_info, kind: k
787 }),
788 is_cleanup: is_cleanup
789 })
790 }
791
792 fn elaborated_drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
793 debug!("elaborated_drop_block({:?})", c);
794 let blk = self.drop_block(c);
795 self.elaborate_drop(c, blk);
796 blk
797 }
798
799 fn drop_flag_test_block<'a>(&mut self,
800 c: &DropCtxt<'a, 'tcx>,
801 on_set: BasicBlock)
802 -> BasicBlock {
803 self.drop_flag_test_block_with_succ(c, c.is_cleanup, on_set, c.succ)
804 }
805
806 fn drop_flag_test_block_with_succ<'a>(&mut self,
807 c: &DropCtxt<'a, 'tcx>,
808 is_cleanup: bool,
809 on_set: BasicBlock,
810 on_unset: BasicBlock)
811 -> BasicBlock
812 {
813 let (maybe_live, maybe_dead) = c.init_data.state(c.path);
814 debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
815 c, is_cleanup, on_set, (maybe_live, maybe_dead));
816
817 match (maybe_live, maybe_dead) {
818 (false, _) => on_unset,
819 (true, false) => on_set,
820 (true, true) => {
821 let flag = self.drop_flag(c.path).unwrap();
8bb4bdeb
XL
822 let term = TerminatorKind::if_(self.tcx, Operand::Consume(flag), on_set, on_unset);
823 self.new_block(c, is_cleanup, term)
3157f602
XL
824 }
825 }
826 }
827
828 fn drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
829 self.new_block(c, c.is_cleanup, TerminatorKind::Drop {
830 location: c.lvalue.clone(),
831 target: c.succ,
832 unwind: c.unwind
833 })
834 }
835
836 fn box_free_block<'a>(
837 &mut self,
838 c: &DropCtxt<'a, 'tcx>,
839 ty: Ty<'tcx>,
840 target: BasicBlock,
841 is_cleanup: bool
842 ) -> BasicBlock {
843 let block = self.unelaborated_free_block(c, ty, target, is_cleanup);
844 self.drop_flag_test_block_with_succ(c, is_cleanup, block, target)
845 }
846
847 fn unelaborated_free_block<'a>(
848 &mut self,
849 c: &DropCtxt<'a, 'tcx>,
850 ty: Ty<'tcx>,
851 target: BasicBlock,
852 is_cleanup: bool
853 ) -> BasicBlock {
854 let mut statements = vec![];
855 if let Some(&flag) = self.drop_flags.get(&c.path) {
856 statements.push(Statement {
857 source_info: c.source_info,
858 kind: StatementKind::Assign(
c30ab7b3 859 Lvalue::Local(flag),
3157f602
XL
860 self.constant_bool(c.source_info.span, false)
861 )
862 });
863 }
864
865 let tcx = self.tcx;
c30ab7b3 866 let unit_temp = Lvalue::Local(self.patch.new_temp(tcx.mk_nil()));
476ff2be 867 let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
c30ab7b3 868 let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
476ff2be 869 let fty = tcx.item_type(free_func).subst(tcx, substs);
3157f602
XL
870
871 self.patch.new_block(BasicBlockData {
872 statements: statements,
873 terminator: Some(Terminator {
874 source_info: c.source_info, kind: TerminatorKind::Call {
875 func: Operand::Constant(Constant {
876 span: c.source_info.span,
877 ty: fty,
878 literal: Literal::Item {
879 def_id: free_func,
880 substs: substs
881 }
882 }),
883 args: vec![Operand::Consume(c.lvalue.clone())],
884 destination: Some((unit_temp, target)),
885 cleanup: None
886 }
887 }),
888 is_cleanup: is_cleanup
889 })
890 }
891
892 fn must_complete_drop<'a>(&self, c: &DropCtxt<'a, 'tcx>) -> bool {
893 // if we have a destuctor, we must *not* split the drop.
894
895 // dataflow can create unneeded children in some cases
896 // - be sure to ignore them.
897
5bcae85e 898 let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
3157f602
XL
899
900 match ty.sty {
9e0c209e 901 ty::TyAdt(def, _) => {
8bb4bdeb 902 if def.has_dtor(self.tcx) && !def.is_box() {
3157f602
XL
903 self.tcx.sess.span_warn(
904 c.source_info.span,
905 &format!("dataflow bug??? moving out of type with dtor {:?}",
906 c));
907 true
908 } else {
909 false
910 }
911 }
912 _ => false
913 }
914 }
915
916 fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
917 Rvalue::Use(Operand::Constant(Constant {
918 span: span,
919 ty: self.tcx.types.bool,
920 literal: Literal::Value { value: ConstVal::Bool(val) }
921 }))
922 }
923
924 fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
925 if let Some(&flag) = self.drop_flags.get(&path) {
926 let span = self.patch.source_info_for_location(self.mir, loc).span;
927 let val = self.constant_bool(span, val.value());
c30ab7b3 928 self.patch.add_assign(loc, Lvalue::Local(flag), val);
3157f602
XL
929 }
930 }
931
932 fn drop_flags_on_init(&mut self) {
9e0c209e 933 let loc = Location { block: START_BLOCK, statement_index: 0 };
3157f602
XL
934 let span = self.patch.source_info_for_location(self.mir, loc).span;
935 let false_ = self.constant_bool(span, false);
936 for flag in self.drop_flags.values() {
c30ab7b3 937 self.patch.add_assign(loc, Lvalue::Local(*flag), false_.clone());
3157f602
XL
938 }
939 }
940
941 fn drop_flags_for_fn_rets(&mut self) {
942 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
943 if let TerminatorKind::Call {
944 destination: Some((ref lv, tgt)), cleanup: Some(_), ..
945 } = data.terminator().kind {
946 assert!(!self.patch.is_patched(bb));
947
9e0c209e 948 let loc = Location { block: tgt, statement_index: 0 };
3157f602 949 let path = self.move_data().rev_lookup.find(lv);
9e0c209e 950 on_lookup_result_bits(
3157f602
XL
951 self.tcx, self.mir, self.move_data(), path,
952 |child| self.set_drop_flag(loc, child, DropFlagState::Present)
953 );
954 }
955 }
956 }
957
958 fn drop_flags_for_args(&mut self) {
9e0c209e 959 let loc = Location { block: START_BLOCK, statement_index: 0 };
3157f602
XL
960 super::drop_flag_effects_for_function_entry(
961 self.tcx, self.mir, self.env, |path, ds| {
962 self.set_drop_flag(loc, path, ds);
963 }
964 )
965 }
966
967 fn drop_flags_for_locs(&mut self) {
968 // We intentionally iterate only over the *old* basic blocks.
969 //
970 // Basic blocks created by drop elaboration update their
971 // drop flags by themselves, to avoid the drop flags being
972 // clobbered before they are read.
973
974 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
975 debug!("drop_flags_for_locs({:?})", data);
976 for i in 0..(data.statements.len()+1) {
977 debug!("drop_flag_for_locs: stmt {}", i);
978 let mut allow_initializations = true;
979 if i == data.statements.len() {
980 match data.terminator().kind {
981 TerminatorKind::Drop { .. } => {
982 // drop elaboration should handle that by itself
983 continue
984 }
985 TerminatorKind::DropAndReplace { .. } => {
986 // this contains the move of the source and
987 // the initialization of the destination. We
988 // only want the former - the latter is handled
989 // by the elaboration code and must be done
990 // *after* the destination is dropped.
991 assert!(self.patch.is_patched(bb));
992 allow_initializations = false;
993 }
994 _ => {
995 assert!(!self.patch.is_patched(bb));
996 }
997 }
998 }
9e0c209e 999 let loc = Location { block: bb, statement_index: i };
3157f602
XL
1000 super::drop_flag_effects_for_location(
1001 self.tcx, self.mir, self.env, loc, |path, ds| {
1002 if ds == DropFlagState::Absent || allow_initializations {
1003 self.set_drop_flag(loc, path, ds)
1004 }
1005 }
1006 )
1007 }
1008
1009 // There may be a critical edge after this call,
1010 // so mark the return as initialized *before* the
1011 // call.
1012 if let TerminatorKind::Call {
1013 destination: Some((ref lv, _)), cleanup: None, ..
1014 } = data.terminator().kind {
1015 assert!(!self.patch.is_patched(bb));
1016
9e0c209e 1017 let loc = Location { block: bb, statement_index: data.statements.len() };
3157f602 1018 let path = self.move_data().rev_lookup.find(lv);
9e0c209e 1019 on_lookup_result_bits(
3157f602
XL
1020 self.tcx, self.mir, self.move_data(), path,
1021 |child| self.set_drop_flag(loc, child, DropFlagState::Present)
1022 );
1023 }
1024 }
1025 }
1026
1027 fn drop_flags_for_drop<'a>(&mut self,
1028 c: &DropCtxt<'a, 'tcx>,
1029 bb: BasicBlock)
1030 {
1031 let loc = self.patch.terminator_loc(self.mir, bb);
1032 on_all_children_bits(
1033 self.tcx, self.mir, self.move_data(), c.path,
1034 |child| self.set_drop_flag(loc, child, DropFlagState::Absent)
1035 );
1036 }
1037}