// Upstream provenance (from git.proxmox.com, rustc.git blame view):
// src/librustc_borrowck/borrowck/mir/elaborate_drops.rs
// Imported Upstream version 1.11.0+dfsg1 (commit 3157f602)
1// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2// file at the top-level directory of this distribution and at
3// http://rust-lang.org/COPYRIGHT.
4//
5// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8// option. This file may not be copied, modified, or distributed
9// except according to those terms.
10
11use indexed_set::IdxSetBuf;
12use super::gather_moves::{MoveData, MovePathIndex, MovePathContent, Location};
13use super::dataflow::{MaybeInitializedLvals, MaybeUninitializedLvals};
14use super::dataflow::{DataflowResults};
15use super::{drop_flag_effects_for_location, on_all_children_bits};
16use super::{DropFlagState, MoveDataParamEnv};
17use super::patch::MirPatch;
18use rustc::ty::{self, Ty, TyCtxt};
19use rustc::ty::subst::{Subst, Substs, VecPerParamSpace};
20use rustc::mir::repr::*;
21use rustc::mir::transform::{Pass, MirPass, MirSource};
22use rustc::middle::const_val::ConstVal;
23use rustc::middle::lang_items;
24use rustc::util::nodemap::FnvHashMap;
25use rustc_data_structures::indexed_vec::Idx;
26use syntax_pos::Span;
27
28use std::fmt;
29use std::u32;
30
31pub struct ElaborateDrops;
32
impl<'tcx> MirPass<'tcx> for ElaborateDrops {
    fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                    src: MirSource, mir: &mut Mir<'tcx>)
    {
        debug!("elaborate_drops({:?} @ {:?})", src, mir.span);
        // Only function bodies are elaborated; constants, statics and
        // promoted rvalues are skipped.
        match src {
            MirSource::Fn(..) => {},
            _ => return
        }
        let id = src.item_id();
        let param_env = ty::ParameterEnvironment::for_item(tcx, id);
        let move_data = MoveData::gather_moves(mir, tcx);
        // Build the patch against a shared borrow of the MIR; it is
        // applied only after the context (which holds `&Mir`) is dropped.
        let elaborate_patch = {
            let mir = &*mir;
            let env = MoveDataParamEnv {
                move_data: move_data,
                param_env: param_env
            };
            // Dataflow over move paths: which paths may be initialized /
            // may be uninitialized on entry to each basic block.
            let flow_inits =
                super::do_dataflow(tcx, mir, id, &[], &env,
                                   MaybeInitializedLvals::new(tcx, mir));
            let flow_uninits =
                super::do_dataflow(tcx, mir, id, &[], &env,
                                   MaybeUninitializedLvals::new(tcx, mir));

            ElaborateDropsCtxt {
                tcx: tcx,
                mir: mir,
                env: &env,
                flow_inits: flow_inits,
                flow_uninits: flow_uninits,
                drop_flags: FnvHashMap(),
                patch: MirPatch::new(mir),
            }.elaborate()
        };
        elaborate_patch.apply(mir);
    }
}
71
72impl Pass for ElaborateDrops {}
73
/// Per-location initialization state, derived from the block-entry
/// dataflow sets and replayed statement-by-statement.
struct InitializationData {
    live: IdxSetBuf<MovePathIndex>, // paths that may be initialized
    dead: IdxSetBuf<MovePathIndex>  // paths that may be uninitialized
}
78
impl InitializationData {
    /// Applies the drop-flag effects of the statement at `loc` to the
    /// live/dead sets (a path becoming initialized leaves `dead`, and
    /// vice versa).
    fn apply_location<'a,'tcx>(&mut self,
                               tcx: TyCtxt<'a, 'tcx, 'tcx>,
                               mir: &Mir<'tcx>,
                               env: &MoveDataParamEnv<'tcx>,
                               loc: Location)
    {
        drop_flag_effects_for_location(tcx, mir, env, loc, |path, df| {
            debug!("at location {:?}: setting {:?} to {:?}",
                   loc, path, df);
            match df {
                DropFlagState::Present => {
                    self.live.add(&path);
                    self.dead.remove(&path);
                }
                DropFlagState::Absent => {
                    self.dead.add(&path);
                    self.live.remove(&path);
                }
            }
        });
    }

    /// Returns `(maybe_live, maybe_dead)` for `path`. Both being true is
    /// exactly the case that requires a dynamic drop flag.
    fn state(&self, path: MovePathIndex) -> (bool, bool) {
        (self.live.contains(&path), self.dead.contains(&path))
    }
}
106
107impl fmt::Debug for InitializationData {
108 fn fmt(&self, _f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
109 Ok(())
110 }
111}
112
/// Working state for the pass: the MIR being analyzed (read-only), the
/// dataflow results, the drop-flag temporaries allocated so far, and the
/// patch accumulating all edits.
struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    mir: &'a Mir<'tcx>,
    env: &'a MoveDataParamEnv<'tcx>,
    flow_inits: DataflowResults<MaybeInitializedLvals<'a, 'tcx>>,
    flow_uninits: DataflowResults<MaybeUninitializedLvals<'a, 'tcx>>,
    // One boolean temp per move path that needs dynamic tracking.
    drop_flags: FnvHashMap<MovePathIndex, Temp>,
    patch: MirPatch<'tcx>,
}
122
/// Everything needed to elaborate one drop: the lvalue being dropped,
/// its move path, the initialization state at the drop site, and the
/// success/unwind continuations.
#[derive(Copy, Clone, Debug)]
struct DropCtxt<'a, 'tcx: 'a> {
    source_info: SourceInfo,
    is_cleanup: bool,

    init_data: &'a InitializationData,

    lvalue: &'a Lvalue<'tcx>,
    path: MovePathIndex,
    succ: BasicBlock,
    // `None` inside cleanup blocks, which cannot unwind again.
    unwind: Option<BasicBlock>
}
135
136impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
    /// Shorthand accessor for the gathered move data.
    fn move_data(&self) -> &'b MoveData<'tcx> { &self.env.move_data }
    /// Shorthand accessor for the parameter environment (used for
    /// `type_needs_drop` queries).
    fn param_env(&self) -> &'b ty::ParameterEnvironment<'tcx> {
        &self.env.param_env
    }
141
    /// Computes the initialization state at `loc` by starting from the
    /// block-entry dataflow sets and replaying the effects of the
    /// statements before `loc.index`.
    fn initialization_data_at(&self, loc: Location) -> InitializationData {
        let mut data = InitializationData {
            live: self.flow_inits.sets().on_entry_set_for(loc.block.index())
                .to_owned(),
            dead: self.flow_uninits.sets().on_entry_set_for(loc.block.index())
                .to_owned(),
        };
        for stmt in 0..loc.index {
            data.apply_location(self.tcx, self.mir, self.env,
                                Location { block: loc.block, index: stmt });
        }
        data
    }
155
    /// Allocates a fresh boolean temporary as the drop flag for `index`,
    /// unless one already exists.
    fn create_drop_flag(&mut self, index: MovePathIndex) {
        let tcx = self.tcx;
        // Borrow `patch` separately from `drop_flags` so the closure
        // passed to `or_insert_with` can allocate the temp.
        let patch = &mut self.patch;
        self.drop_flags.entry(index).or_insert_with(|| {
            patch.new_temp(tcx.types.bool)
        });
    }

    /// The drop-flag lvalue for `index`, or `None` if the path needs no
    /// dynamic flag.
    fn drop_flag(&mut self, index: MovePathIndex) -> Option<Lvalue<'tcx>> {
        self.drop_flags.get(&index).map(|t| Lvalue::Temp(*t))
    }
167
    /// create a patch that elaborates all drops in the input
    /// MIR.
    fn elaborate(mut self) -> MirPatch<'tcx>
    {
        // Phase 1: decide which move paths need a dynamic flag.
        self.collect_drop_flags();

        // Phase 2: rewrite every Drop/DropAndReplace terminator.
        self.elaborate_drops();

        // Phase 3: insert the statements that keep the flags up to date.
        self.drop_flags_on_init();
        self.drop_flags_for_fn_rets();
        self.drop_flags_for_args();
        self.drop_flags_for_locs();

        self.patch
    }
183
184 fn path_needs_drop(&self, path: MovePathIndex) -> bool
185 {
186 match self.move_data().move_paths[path].content {
187 MovePathContent::Lvalue(ref lvalue) => {
188 let ty = self.mir.lvalue_ty(self.tcx, lvalue).to_ty(self.tcx);
189 debug!("path_needs_drop({:?}, {:?} : {:?})", path, lvalue, ty);
190
191 self.tcx.type_needs_drop_given_env(ty, self.param_env())
192 }
193 _ => false
194 }
195 }
196
    /// Returns whether this lvalue is tracked by drop elaboration. This
    /// includes all lvalues, except these (1.) behind references or arrays,
    /// or (2.) behind ADT's with a Drop impl.
    fn lvalue_is_tracked(&self, lv: &Lvalue<'tcx>) -> bool
    {
        // `lvalue_contents_drop_state_cannot_differ` only compares
        // the `lv` to its immediate contents, while this recursively
        // follows parent chain formed by `base` of each projection.
        if let &Lvalue::Projection(ref data) = lv {
            !super::lvalue_contents_drop_state_cannot_differ(self.tcx, self.mir, &data.base) &&
            self.lvalue_is_tracked(&data.base)
        } else {
            // Non-projected lvalues (locals, args, …) are always tracked.
            true
        }
    }
212
    /// Scans all drop terminators and allocates a drop flag for every
    /// dropped child path that is *both* maybe-live and maybe-dead at the
    /// drop site — i.e. whose state cannot be determined statically.
    fn collect_drop_flags(&mut self)
    {
        for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
            let terminator = data.terminator();
            let location = match terminator.kind {
                TerminatorKind::Drop { ref location, .. } |
                TerminatorKind::DropAndReplace { ref location, .. } => location,
                _ => continue
            };

            // Untracked lvalues get unconditional drops; no flag needed.
            if !self.lvalue_is_tracked(location) {
                continue
            }

            // State just before the terminator (after all statements).
            let init_data = self.initialization_data_at(Location {
                block: bb,
                index: data.statements.len()
            });

            let path = self.move_data().rev_lookup.find(location);
            debug!("collect_drop_flags: {:?}, lv {:?} (index {:?})",
                   bb, location, path);

            on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
                if self.path_needs_drop(child) {
                    let (maybe_live, maybe_dead) = init_data.state(child);
                    debug!("collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
                           child, location, path, (maybe_live, maybe_dead));
                    if maybe_live && maybe_dead {
                        self.create_drop_flag(child)
                    }
                }
            });
        }
    }
248
249 fn elaborate_drops(&mut self)
250 {
251 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
252 let loc = Location { block: bb, index: data.statements.len() };
253 let terminator = data.terminator();
254
255 let resume_block = self.patch.resume_block();
256 match terminator.kind {
257 TerminatorKind::Drop { ref location, target, unwind } => {
258 let init_data = self.initialization_data_at(loc);
259 let path = self.move_data().rev_lookup.find(location);
260 self.elaborate_drop(&DropCtxt {
261 source_info: terminator.source_info,
262 is_cleanup: data.is_cleanup,
263 init_data: &init_data,
264 lvalue: location,
265 path: path,
266 succ: target,
267 unwind: if data.is_cleanup {
268 None
269 } else {
270 Some(Option::unwrap_or(unwind, resume_block))
271 }
272 }, bb);
273 }
274 TerminatorKind::DropAndReplace { ref location, ref value,
275 target, unwind } =>
276 {
277 assert!(!data.is_cleanup);
278
279 self.elaborate_replace(
280 loc,
281 location, value,
282 target, unwind
283 );
284 }
285 _ => continue
286 }
287 }
288 }
289
    /// Elaborate a MIR `replace` terminator. This instruction
    /// is not directly handled by translation, and therefore
    /// must be desugared.
    ///
    /// The desugaring drops the location if needed, and then writes
    /// the value (including setting the drop flag) over it in *both* arms.
    ///
    /// The `replace` terminator can also be called on lvalues that
    /// are not tracked by elaboration (for example,
    /// `replace x[i] <- tmp0`). The borrow checker requires that
    /// these locations are initialized before the assignment,
    /// so we just generate an unconditional drop.
    fn elaborate_replace(
        &mut self,
        loc: Location,
        location: &Lvalue<'tcx>,
        value: &Operand<'tcx>,
        target: BasicBlock,
        unwind: Option<BasicBlock>)
    {
        let bb = loc.block;
        let data = &self.mir[bb];
        let terminator = data.terminator();

        // The write of `value` into `location`, performed on both the
        // success and the unwind edge.
        let assign = Statement {
            kind: StatementKind::Assign(location.clone(), Rvalue::Use(value.clone())),
            source_info: terminator.source_info
        };

        // Unwind edge: assign, then continue unwinding (to the explicit
        // target or the resume block).
        let unwind = unwind.unwrap_or(self.patch.resume_block());
        let unwind = self.patch.new_block(BasicBlockData {
            statements: vec![assign.clone()],
            terminator: Some(Terminator {
                kind: TerminatorKind::Goto { target: unwind },
                ..*terminator
            }),
            is_cleanup: true
        });

        // Success edge: assign, then continue to the original target.
        let target = self.patch.new_block(BasicBlockData {
            statements: vec![assign],
            terminator: Some(Terminator {
                kind: TerminatorKind::Goto { target: target },
                ..*terminator
            }),
            is_cleanup: data.is_cleanup,
        });

        if !self.lvalue_is_tracked(location) {
            // drop and replace behind a pointer/array/whatever. The location
            // must be initialized.
            debug!("elaborate_drop_and_replace({:?}) - untracked", terminator);
            self.patch.patch_terminator(bb, TerminatorKind::Drop {
                location: location.clone(),
                target: target,
                unwind: Some(unwind)
            });
        } else {
            debug!("elaborate_drop_and_replace({:?}) - tracked", terminator);
            let init_data = self.initialization_data_at(loc);
            let path = self.move_data().rev_lookup.find(location);

            self.elaborate_drop(&DropCtxt {
                source_info: terminator.source_info,
                is_cleanup: data.is_cleanup,
                init_data: &init_data,
                lvalue: location,
                path: path,
                succ: target,
                unwind: Some(unwind)
            }, bb);
            // After the assignment the location is initialized again on
            // both edges, so mark every child path as present.
            on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
                self.set_drop_flag(Location { block: target, index: 0 },
                                   child, DropFlagState::Present);
                self.set_drop_flag(Location { block: unwind, index: 0 },
                                   child, DropFlagState::Present);
            });
        }
    }
369
    /// This elaborates a single drop instruction, located at `bb`, and
    /// patches over it.
    ///
    /// The elaborated drop checks the drop flags to only drop what
    /// is initialized.
    ///
    /// In addition, the relevant drop flags also need to be cleared
    /// to avoid double-drops. However, in the middle of a complex
    /// drop, one must avoid clearing some of the flags before they
    /// are read, as that would cause a memory leak.
    ///
    /// In particular, when dropping an ADT, multiple fields may be
    /// joined together under the `rest` subpath. They are all controlled
    /// by the primary drop flag, but only the last rest-field dropped
    /// should clear it (and it must also not clear anything else).
    ///
    /// FIXME: I think we should just control the flags externally
    /// and then we do not need this machinery.
    fn elaborate_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, bb: BasicBlock) {
        debug!("elaborate_drop({:?})", c);

        // Summarize the init state of every droppable child path.
        let mut some_live = false;
        let mut some_dead = false;
        let mut children_count = 0;
        on_all_children_bits(
            self.tcx, self.mir, self.move_data(),
            c.path, |child| {
                if self.path_needs_drop(child) {
                    let (live, dead) = c.init_data.state(child);
                    debug!("elaborate_drop: state({:?}) = {:?}",
                           child, (live, dead));
                    some_live |= live;
                    some_dead |= dead;
                    children_count += 1;
                }
            });

        debug!("elaborate_drop({:?}): live - {:?}", c,
               (some_live, some_dead));
        match (some_live, some_dead) {
            (false, false) | (false, true) => {
                // dead drop - patch it out
                self.patch.patch_terminator(bb, TerminatorKind::Goto {
                    target: c.succ
                });
            }
            (true, false) => {
                // static drop - just set the flag
                self.patch.patch_terminator(bb, TerminatorKind::Drop {
                    location: c.lvalue.clone(),
                    target: c.succ,
                    unwind: c.unwind
                });
                self.drop_flags_for_drop(c, bb);
            }
            (true, true) => {
                // dynamic drop
                let drop_bb = if children_count == 1 || self.must_complete_drop(c) {
                    self.conditional_drop(c)
                } else {
                    self.open_drop(c)
                };
                self.patch.patch_terminator(bb, TerminatorKind::Goto {
                    target: drop_bb
                });
            }
        }
    }
438
    /// Return the lvalue and move path for each field of `variant`,
    /// (the move path is `None` if the field is a rest field).
    fn move_paths_for_fields(&self,
                             base_lv: &Lvalue<'tcx>,
                             variant_path: MovePathIndex,
                             variant: ty::VariantDef<'tcx>,
                             substs: &'tcx Substs<'tcx>)
                             -> Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>
    {
        let move_paths = &self.move_data().move_paths;
        variant.fields.iter().enumerate().map(|(i, f)| {
            // Look for a child path projecting out exactly field `i`.
            let subpath =
                super::move_path_children_matching(move_paths, variant_path, |p| {
                    match p {
                        &Projection {
                            elem: ProjectionElem::Field(idx, _), ..
                        } => idx.index() == i,
                        _ => false
                    }
                });

            // Normalize so associated types in field types are resolved.
            let field_ty =
                self.tcx.normalize_associated_type_in_env(
                    &f.ty(self.tcx, substs),
                    self.param_env()
                );
            (base_lv.clone().field(Field::new(i), field_ty), subpath)
        }).collect()
    }
468
    /// Create one-half of the drop ladder for a list of fields, and return
    /// the list of steps in it in reverse order.
    ///
    /// `unwind_ladder` is such a list of steps in reverse order,
    /// which is called instead of the next step if the drop unwinds
    /// (the first field is never reached). If it is `None`, all
    /// unwind targets are left blank.
    fn drop_halfladder<'a>(&mut self,
                           c: &DropCtxt<'a, 'tcx>,
                           unwind_ladder: Option<Vec<BasicBlock>>,
                           succ: BasicBlock,
                           fields: &[(Lvalue<'tcx>, Option<MovePathIndex>)],
                           is_cleanup: bool)
                           -> Vec<BasicBlock>
    {
        // Fields are processed in reverse: the last field's drop jumps to
        // `succ`, and each earlier field's drop jumps to the one after it.
        let mut succ = succ;
        let mut unwind_succ = if is_cleanup {
            None
        } else {
            c.unwind
        };
        // Only the *first* rest-field encountered (i.e. the last one
        // dropped) may clear the primary drop flag; see `elaborate_drop`.
        let mut update_drop_flag = true;

        fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
            let drop_block = match path {
                Some(path) => {
                    debug!("drop_ladder: for std field {} ({:?})", i, lv);

                    // Field has its own move path: fully elaborate it.
                    self.elaborated_drop_block(&DropCtxt {
                        source_info: c.source_info,
                        is_cleanup: is_cleanup,
                        init_data: c.init_data,
                        lvalue: lv,
                        path: path,
                        succ: succ,
                        unwind: unwind_succ,
                    })
                }
                None => {
                    debug!("drop_ladder: for rest field {} ({:?})", i, lv);

                    // Rest field: controlled by the parent's flag.
                    let blk = self.complete_drop(&DropCtxt {
                        source_info: c.source_info,
                        is_cleanup: is_cleanup,
                        init_data: c.init_data,
                        lvalue: lv,
                        path: c.path,
                        succ: succ,
                        unwind: unwind_succ,
                    }, update_drop_flag);

                    // the drop flag has been updated - updating
                    // it again would clobber it.
                    update_drop_flag = false;

                    blk
                }
            };

            succ = drop_block;
            unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);

            drop_block
        }).collect()
    }
534
    /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
    ///
    /// For example, with 3 fields, the drop ladder is
    ///
    /// .d0:
    ///     ELAB(drop location.0 [target=.d1, unwind=.c1])
    /// .d1:
    ///     ELAB(drop location.1 [target=.d2, unwind=.c2])
    /// .d2:
    ///     ELAB(drop location.2 [target=`c.succ`, unwind=`c.unwind`])
    /// .c1:
    ///     ELAB(drop location.1 [target=.c2])
    /// .c2:
    ///     ELAB(drop location.2 [target=`c.unwind])
    fn drop_ladder<'a>(&mut self,
                       c: &DropCtxt<'a, 'tcx>,
                       fields: Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>)
                       -> BasicBlock
    {
        debug!("drop_ladder({:?}, {:?})", c, fields);

        // Skip fields whose type has no drop glue at all.
        let mut fields = fields;
        fields.retain(|&(ref lvalue, _)| {
            let ty = self.mir.lvalue_ty(self.tcx, lvalue).to_ty(self.tcx);
            self.tcx.type_needs_drop_given_env(ty, self.param_env())
        });

        debug!("drop_ladder - fields needing drop: {:?}", fields);

        // Build the cleanup half first (unless we're already in cleanup),
        // so the normal half can point its unwind edges into it.
        let unwind_ladder = if c.is_cleanup {
            None
        } else {
            Some(self.drop_halfladder(c, None, c.unwind.unwrap(), &fields, true))
        };

        // The last block built (first field dropped) is the ladder entry;
        // with no droppable fields, fall through to `c.succ`.
        self.drop_halfladder(c, unwind_ladder, c.succ, &fields, c.is_cleanup)
            .last().cloned().unwrap_or(c.succ)
    }
573
    /// Builds a drop ladder over the elements of a tuple (or the upvars
    /// of a closure, which are laid out the same way).
    fn open_drop_for_tuple<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, tys: &[Ty<'tcx>])
                               -> BasicBlock
    {
        debug!("open_drop_for_tuple({:?}, {:?})", c, tys);

        // Pair each element lvalue with its child move path (if any).
        let fields = tys.iter().enumerate().map(|(i, &ty)| {
            (c.lvalue.clone().field(Field::new(i), ty),
             super::move_path_children_matching(
                 &self.move_data().move_paths, c.path, |proj| match proj {
                     &Projection {
                         elem: ProjectionElem::Field(f, _), ..
                     } => f.index() == i,
                     _ => false
                 }
             ))
        }).collect();

        self.drop_ladder(c, fields)
    }

    /// Open drop for `Box<T>`: elaborate the drop of the boxed contents
    /// (`*box`), then free the box itself on both the success and the
    /// unwind path.
    fn open_drop_for_box<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, ty: Ty<'tcx>)
                             -> BasicBlock
    {
        debug!("open_drop_for_box({:?}, {:?})", c, ty);

        // The deref child must exist: the box is only partially moved,
        // so its contents have a move path.
        let interior_path = super::move_path_children_matching(
            &self.move_data().move_paths, c.path, |proj| match proj {
                &Projection { elem: ProjectionElem::Deref, .. } => true,
                _ => false
            }).unwrap();

        let interior = c.lvalue.clone().deref();
        let inner_c = DropCtxt {
            lvalue: &interior,
            // Both continuations go through a `box_free` block.
            unwind: c.unwind.map(|u| {
                self.box_free_block(c, ty, u, true)
            }),
            succ: self.box_free_block(c, ty, c.succ, c.is_cleanup),
            path: interior_path,
            ..*c
        };

        self.elaborated_drop_block(&inner_c)
    }
618
619 fn open_drop_for_variant<'a>(&mut self,
620 c: &DropCtxt<'a, 'tcx>,
621 drop_block: &mut Option<BasicBlock>,
622 adt: ty::AdtDef<'tcx>,
623 substs: &'tcx Substs<'tcx>,
624 variant_index: usize)
625 -> BasicBlock
626 {
627 let move_paths = &self.move_data().move_paths;
628
629 let subpath = super::move_path_children_matching(
630 move_paths, c.path, |proj| match proj {
631 &Projection {
632 elem: ProjectionElem::Downcast(_, idx), ..
633 } => idx == variant_index,
634 _ => false
635 });
636
637 if let Some(variant_path) = subpath {
638 let base_lv = c.lvalue.clone().elem(
639 ProjectionElem::Downcast(adt, variant_index)
640 );
641 let fields = self.move_paths_for_fields(
642 &base_lv,
643 variant_path,
644 &adt.variants[variant_index],
645 substs);
646 self.drop_ladder(c, fields)
647 } else {
648 // variant not found - drop the entire enum
649 if let None = *drop_block {
650 *drop_block = Some(self.complete_drop(c, true));
651 }
652 return drop_block.unwrap();
653 }
654 }
655
    /// Open drop for a struct or enum. Structs get a plain field ladder;
    /// enums get one ladder per variant, selected by a discriminant
    /// switch that is itself guarded by the drop flag.
    fn open_drop_for_adt<'a>(&mut self, c: &DropCtxt<'a, 'tcx>,
                             adt: ty::AdtDef<'tcx>, substs: &'tcx Substs<'tcx>)
                             -> BasicBlock {
        debug!("open_drop_for_adt({:?}, {:?}, {:?})", c, adt, substs);

        // Shared "drop the whole enum" block, filled in lazily by
        // `open_drop_for_variant` for variants without a move path.
        let mut drop_block = None;

        match adt.variants.len() {
            1 => {
                let fields = self.move_paths_for_fields(
                    c.lvalue,
                    c.path,
                    &adt.variants[0],
                    substs
                );
                self.drop_ladder(c, fields)
            }
            _ => {
                let variant_drops : Vec<BasicBlock> =
                    (0..adt.variants.len()).map(|i| {
                        self.open_drop_for_variant(c, &mut drop_block,
                                                   adt, substs, i)
                    }).collect();

                // If there are multiple variants, then if something
                // is present within the enum the discriminant, tracked
                // by the rest path, must be initialized.
                //
                // Additionally, we do not want to switch on the
                // discriminant after it is free-ed, because that
                // way lies only trouble.

                let switch_block = self.new_block(
                    c, c.is_cleanup, TerminatorKind::Switch {
                        discr: c.lvalue.clone(),
                        adt_def: adt,
                        targets: variant_drops
                    });

                self.drop_flag_test_block(c, switch_block)
            }
        }
    }
699
    /// The slow-path - create an "open", elaborated drop for a type
    /// which is moved-out-of only partially, and patch `bb` to a jump
    /// to it. This must not be called on ADTs with a destructor,
    /// as these can't be moved-out-of, except for `Box<T>`, which is
    /// special-cased.
    ///
    /// This creates a "drop ladder" that drops the needed fields of the
    /// ADT, both in the success case or if one of the destructors fail.
    fn open_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
        let ty = self.mir.lvalue_ty(self.tcx, c.lvalue).to_ty(self.tcx);
        // Dispatch on the type being dropped; all other types are
        // dropped whole, so an open drop on them is a compiler bug.
        match ty.sty {
            ty::TyStruct(def, substs) | ty::TyEnum(def, substs) => {
                self.open_drop_for_adt(c, def, substs)
            }
            ty::TyTuple(tys) | ty::TyClosure(_, ty::ClosureSubsts {
                upvar_tys: tys, ..
            }) => {
                self.open_drop_for_tuple(c, tys)
            }
            ty::TyBox(ty) => {
                self.open_drop_for_box(c, ty)
            }
            _ => bug!("open drop from non-ADT `{:?}`", ty)
        }
    }
725
    /// Return a basic block that drop an lvalue using the context
    /// and path in `c`. If `update_drop_flag` is true, also
    /// clear `c`.
    ///
    /// if FLAG(c.path)
    ///     if(update_drop_flag) FLAG(c.path) = false
    ///     drop(c.lv)
    fn complete_drop<'a>(
        &mut self,
        c: &DropCtxt<'a, 'tcx>,
        update_drop_flag: bool)
        -> BasicBlock
    {
        debug!("complete_drop({:?},{:?})", c, update_drop_flag);

        let drop_block = self.drop_block(c);
        if update_drop_flag {
            // Clear the flag at the head of the drop block, before the
            // drop terminator runs.
            self.set_drop_flag(
                Location { block: drop_block, index: 0 },
                c.path,
                DropFlagState::Absent
            );
        }

        self.drop_flag_test_block(c, drop_block)
    }

    /// Create a simple conditional drop.
    ///
    /// if FLAG(c.lv)
    ///     FLAGS(c.lv) = false
    ///     drop(c.lv)
    fn conditional_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>)
                            -> BasicBlock
    {
        debug!("conditional_drop({:?})", c);
        let drop_bb = self.drop_block(c);
        // Clear the flags of all children, not just c.path.
        self.drop_flags_for_drop(c, drop_bb);

        self.drop_flag_test_block(c, drop_bb)
    }
767
    /// Creates a fresh (patched-in) block with no statements and the
    /// given terminator, inheriting `c`'s source info.
    fn new_block<'a>(&mut self,
                     c: &DropCtxt<'a, 'tcx>,
                     is_cleanup: bool,
                     k: TerminatorKind<'tcx>)
                     -> BasicBlock
    {
        self.patch.new_block(BasicBlockData {
            statements: vec![],
            terminator: Some(Terminator {
                source_info: c.source_info, kind: k
            }),
            is_cleanup: is_cleanup
        })
    }

    /// Creates a plain drop block for `c` and then recursively elaborates
    /// the drop it contains.
    fn elaborated_drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
        debug!("elaborated_drop_block({:?})", c);
        let blk = self.drop_block(c);
        self.elaborate_drop(c, blk);
        blk
    }

    /// Guard `on_set` with `c`'s drop flag, falling through to `c.succ`
    /// when the flag is (or must be) unset.
    fn drop_flag_test_block<'a>(&mut self,
                                c: &DropCtxt<'a, 'tcx>,
                                on_set: BasicBlock)
                                -> BasicBlock {
        self.drop_flag_test_block_with_succ(c, c.is_cleanup, on_set, c.succ)
    }
796
    /// Like `drop_flag_test_block`, but with an explicit fall-through
    /// target. When the init state is statically known, no test block is
    /// emitted at all - the appropriate target is returned directly.
    fn drop_flag_test_block_with_succ<'a>(&mut self,
                                          c: &DropCtxt<'a, 'tcx>,
                                          is_cleanup: bool,
                                          on_set: BasicBlock,
                                          on_unset: BasicBlock)
                                          -> BasicBlock
    {
        let (maybe_live, maybe_dead) = c.init_data.state(c.path);
        debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
               c, is_cleanup, on_set, (maybe_live, maybe_dead));

        match (maybe_live, maybe_dead) {
            // Statically dead: skip the drop entirely.
            (false, _) => on_unset,
            // Statically live: drop unconditionally.
            (true, false) => on_set,
            // Unknown: branch on the drop flag (which `collect_drop_flags`
            // guaranteed to exist for this state).
            (true, true) => {
                let flag = self.drop_flag(c.path).unwrap();
                self.new_block(c, is_cleanup, TerminatorKind::If {
                    cond: Operand::Consume(flag),
                    targets: (on_set, on_unset)
                })
            }
        }
    }

    /// A block containing nothing but an (unelaborated) drop of `c.lvalue`.
    fn drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
        self.new_block(c, c.is_cleanup, TerminatorKind::Drop {
            location: c.lvalue.clone(),
            target: c.succ,
            unwind: c.unwind
        })
    }
828
    /// A flag-guarded call to `box_free` for a `Box<ty>`: frees the box
    /// if `c.path` may be live, otherwise falls through to `target`.
    fn box_free_block<'a>(
        &mut self,
        c: &DropCtxt<'a, 'tcx>,
        ty: Ty<'tcx>,
        target: BasicBlock,
        is_cleanup: bool
    ) -> BasicBlock {
        let block = self.unelaborated_free_block(c, ty, target, is_cleanup);
        self.drop_flag_test_block_with_succ(c, is_cleanup, block, target)
    }

    /// Builds the block that clears `c`'s drop flag (if it has one) and
    /// then calls the `box_free` lang item on `c.lvalue`, continuing to
    /// `target`.
    fn unelaborated_free_block<'a>(
        &mut self,
        c: &DropCtxt<'a, 'tcx>,
        ty: Ty<'tcx>,
        target: BasicBlock,
        is_cleanup: bool
    ) -> BasicBlock {
        let mut statements = vec![];
        if let Some(&flag) = self.drop_flags.get(&c.path) {
            // Clear the flag before freeing, so an unwind from later code
            // does not try to free the box again.
            statements.push(Statement {
                source_info: c.source_info,
                kind: StatementKind::Assign(
                    Lvalue::Temp(flag),
                    self.constant_bool(c.source_info.span, false)
                )
            });
        }

        let tcx = self.tcx;
        // `box_free` returns unit; the call needs a destination temp.
        let unit_temp = Lvalue::Temp(self.patch.new_temp(tcx.mk_nil()));
        let free_func = tcx.lang_items.require(lang_items::BoxFreeFnLangItem)
            .unwrap_or_else(|e| tcx.sess.fatal(&e));
        // Instantiate `box_free::<ty>`.
        let substs = tcx.mk_substs(Substs::new(
            VecPerParamSpace::new(vec![], vec![], vec![ty]),
            VecPerParamSpace::new(vec![], vec![], vec![])
        ));
        let fty = tcx.lookup_item_type(free_func).ty.subst(tcx, substs);

        self.patch.new_block(BasicBlockData {
            statements: statements,
            terminator: Some(Terminator {
                source_info: c.source_info, kind: TerminatorKind::Call {
                    func: Operand::Constant(Constant {
                        span: c.source_info.span,
                        ty: fty,
                        literal: Literal::Item {
                            def_id: free_func,
                            substs: substs
                        }
                    }),
                    args: vec![Operand::Consume(c.lvalue.clone())],
                    destination: Some((unit_temp, target)),
                    cleanup: None
                }
            }),
            is_cleanup: is_cleanup
        })
    }
888
    /// Whether `c` must be dropped as a unit (via `conditional_drop`)
    /// rather than split into an open drop ladder.
    fn must_complete_drop<'a>(&self, c: &DropCtxt<'a, 'tcx>) -> bool {
        // if we have a destructor, we must *not* split the drop.

        // dataflow can create unneeded children in some cases
        // - be sure to ignore them.

        let ty = self.mir.lvalue_ty(self.tcx, c.lvalue).to_ty(self.tcx);

        match ty.sty {
            ty::TyStruct(def, _) | ty::TyEnum(def, _) => {
                if def.has_dtor() {
                    // Types with a dtor can't be moved out of, so the
                    // mixed live/dead state indicates a dataflow bug.
                    self.tcx.sess.span_warn(
                        c.source_info.span,
                        &format!("dataflow bug??? moving out of type with dtor {:?}",
                                 c));
                    true
                } else {
                    false
                }
            }
            _ => false
        }
    }
912
913 fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
914 Rvalue::Use(Operand::Constant(Constant {
915 span: span,
916 ty: self.tcx.types.bool,
917 literal: Literal::Value { value: ConstVal::Bool(val) }
918 }))
919 }
920
    /// Emits an assignment of `val` to `path`'s drop flag at `loc`; a
    /// no-op for paths without a flag.
    fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
        if let Some(&flag) = self.drop_flags.get(&path) {
            let span = self.patch.source_info_for_location(self.mir, loc).span;
            let val = self.constant_bool(span, val.value());
            self.patch.add_assign(loc, Lvalue::Temp(flag), val);
        }
    }

    /// Initializes every drop flag to `false` at function entry.
    fn drop_flags_on_init(&mut self) {
        let loc = Location { block: START_BLOCK, index: 0 };
        let span = self.patch.source_info_for_location(self.mir, loc).span;
        let false_ = self.constant_bool(span, false);
        for flag in self.drop_flags.values() {
            self.patch.add_assign(loc, Lvalue::Temp(*flag), false_.clone());
        }
    }

    /// For calls with a cleanup edge, marks the return destination as
    /// initialized at the head of the success target (it is *not* set on
    /// the unwind edge, where the return value never materialized).
    fn drop_flags_for_fn_rets(&mut self) {
        for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
            if let TerminatorKind::Call {
                destination: Some((ref lv, tgt)), cleanup: Some(_), ..
            } = data.terminator().kind {
                assert!(!self.patch.is_patched(bb));

                let loc = Location { block: tgt, index: 0 };
                let path = self.move_data().rev_lookup.find(lv);
                on_all_children_bits(
                    self.tcx, self.mir, self.move_data(), path,
                    |child| self.set_drop_flag(loc, child, DropFlagState::Present)
                );
            }
        }
    }
954
    /// Marks function arguments as initialized at function entry (after
    /// `drop_flags_on_init` has zeroed everything).
    fn drop_flags_for_args(&mut self) {
        let loc = Location { block: START_BLOCK, index: 0 };
        super::drop_flag_effects_for_function_entry(
            self.tcx, self.mir, self.env, |path, ds| {
                self.set_drop_flag(loc, path, ds);
            }
        )
    }

    /// Emits flag updates for every move/initialization in the body.
    fn drop_flags_for_locs(&mut self) {
        // We intentionally iterate only over the *old* basic blocks.
        //
        // Basic blocks created by drop elaboration update their
        // drop flags by themselves, to avoid the drop flags being
        // clobbered before they are read.

        for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
            debug!("drop_flags_for_locs({:?})", data);
            // Index `data.statements.len()` addresses the terminator.
            for i in 0..(data.statements.len()+1) {
                debug!("drop_flag_for_locs: stmt {}", i);
                let mut allow_initializations = true;
                if i == data.statements.len() {
                    match data.terminator().kind {
                        TerminatorKind::Drop { .. } => {
                            // drop elaboration should handle that by itself
                            continue
                        }
                        TerminatorKind::DropAndReplace { .. } => {
                            // this contains the move of the source and
                            // the initialization of the destination. We
                            // only want the former - the latter is handled
                            // by the elaboration code and must be done
                            // *after* the destination is dropped.
                            assert!(self.patch.is_patched(bb));
                            allow_initializations = false;
                        }
                        _ => {
                            assert!(!self.patch.is_patched(bb));
                        }
                    }
                }
                let loc = Location { block: bb, index: i };
                super::drop_flag_effects_for_location(
                    self.tcx, self.mir, self.env, loc, |path, ds| {
                        if ds == DropFlagState::Absent || allow_initializations {
                            self.set_drop_flag(loc, path, ds)
                        }
                    }
                )
            }

            // There may be a critical edge after this call,
            // so mark the return as initialized *before* the
            // call.
            if let TerminatorKind::Call {
                destination: Some((ref lv, _)), cleanup: None, ..
            } = data.terminator().kind {
                assert!(!self.patch.is_patched(bb));

                let loc = Location { block: bb, index: data.statements.len() };
                let path = self.move_data().rev_lookup.find(lv);
                on_all_children_bits(
                    self.tcx, self.mir, self.move_data(), path,
                    |child| self.set_drop_flag(loc, child, DropFlagState::Present)
                );
            }
        }
    }

    /// Clears the drop flags of `c.path` and all its children right
    /// before the drop terminator in `bb` executes.
    fn drop_flags_for_drop<'a>(&mut self,
                               c: &DropCtxt<'a, 'tcx>,
                               bb: BasicBlock)
    {
        let loc = self.patch.terminator_loc(self.mir, bb);
        on_all_children_bits(
            self.tcx, self.mir, self.move_data(), c.path,
            |child| self.set_drop_flag(loc, child, DropFlagState::Absent)
        );
    }
1034}