]> git.proxmox.com Git - rustc.git/blob - src/librustc_borrowck/borrowck/mir/elaborate_drops.rs
New upstream version 1.13.0+dfsg1
[rustc.git] / src / librustc_borrowck / borrowck / mir / elaborate_drops.rs
1 // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 use indexed_set::IdxSetBuf;
12 use super::gather_moves::{MoveData, MovePathIndex, LookupResult};
13 use super::dataflow::{MaybeInitializedLvals, MaybeUninitializedLvals};
14 use super::dataflow::{DataflowResults};
15 use super::{drop_flag_effects_for_location, on_all_children_bits};
16 use super::on_lookup_result_bits;
17 use super::{DropFlagState, MoveDataParamEnv};
18 use super::patch::MirPatch;
19 use rustc::ty::{self, Ty, TyCtxt};
20 use rustc::ty::subst::{Kind, Subst, Substs};
21 use rustc::mir::repr::*;
22 use rustc::mir::transform::{Pass, MirPass, MirSource};
23 use rustc::middle::const_val::ConstVal;
24 use rustc::middle::lang_items;
25 use rustc::util::nodemap::FnvHashMap;
26 use rustc_data_structures::indexed_vec::Idx;
27 use syntax_pos::Span;
28
29 use std::fmt;
30 use std::iter;
31 use std::u32;
32
/// MIR pass that replaces `Drop`/`DropAndReplace` terminators with
/// explicit control flow guarded by boolean drop flags.
pub struct ElaborateDrops;
34
impl<'tcx> MirPass<'tcx> for ElaborateDrops {
    /// Entry point: gathers move data, runs the two initialization
    /// dataflow analyses, and rewrites every drop terminator in `mir`.
    fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                    src: MirSource, mir: &mut Mir<'tcx>)
    {
        debug!("elaborate_drops({:?} @ {:?})", src, mir.span);
        // Only function bodies are elaborated; other MIR sources are
        // left untouched.
        match src {
            MirSource::Fn(..) => {},
            _ => return
        }
        let id = src.item_id();
        let param_env = ty::ParameterEnvironment::for_item(tcx, id);
        let move_data = MoveData::gather_moves(mir, tcx, &param_env);
        let elaborate_patch = {
            // Shadow with an immutable borrow: elaboration only *reads*
            // the MIR and records its edits into a `MirPatch`.
            let mir = &*mir;
            let env = MoveDataParamEnv {
                move_data: move_data,
                param_env: param_env
            };
            // Both dataflow directions are needed: a path gets a drop
            // flag only where it is maybe-initialized *and*
            // maybe-uninitialized (see `collect_drop_flags`).
            let flow_inits =
                super::do_dataflow(tcx, mir, id, &[], &env,
                                   MaybeInitializedLvals::new(tcx, mir));
            let flow_uninits =
                super::do_dataflow(tcx, mir, id, &[], &env,
                                   MaybeUninitializedLvals::new(tcx, mir));

            ElaborateDropsCtxt {
                tcx: tcx,
                mir: mir,
                env: &env,
                flow_inits: flow_inits,
                flow_uninits: flow_uninits,
                drop_flags: FnvHashMap(),
                patch: MirPatch::new(mir),
            }.elaborate()
        };
        // All recorded edits are applied at once, after the shared
        // borrow of `mir` has ended.
        elaborate_patch.apply(mir);
    }
}
73
// No pass-level configuration beyond the defaults is required.
impl Pass for ElaborateDrops {}
75
/// Snapshot of per-move-path initialization state at one program point:
/// `live` holds maybe-initialized paths, `dead` maybe-uninitialized ones.
/// A path can be in both sets when its state is only known at runtime.
struct InitializationData {
    live: IdxSetBuf<MovePathIndex>,
    dead: IdxSetBuf<MovePathIndex>
}
80
impl InitializationData {
    /// Applies the drop-flag effects of the statement at `loc` to the
    /// cached bitsets, keeping `live` and `dead` mutually exclusive for
    /// each affected path.
    fn apply_location<'a,'tcx>(&mut self,
                               tcx: TyCtxt<'a, 'tcx, 'tcx>,
                               mir: &Mir<'tcx>,
                               env: &MoveDataParamEnv<'tcx>,
                               loc: Location)
    {
        drop_flag_effects_for_location(tcx, mir, env, loc, |path, df| {
            debug!("at location {:?}: setting {:?} to {:?}",
                   loc, path, df);
            match df {
                DropFlagState::Present => {
                    self.live.add(&path);
                    self.dead.remove(&path);
                }
                DropFlagState::Absent => {
                    self.dead.add(&path);
                    self.live.remove(&path);
                }
            }
        });
    }

    /// Returns `(maybe_live, maybe_dead)` for `path`.
    fn state(&self, path: MovePathIndex) -> (bool, bool) {
        (self.live.contains(&path), self.dead.contains(&path))
    }
}
108
109 impl fmt::Debug for InitializationData {
110 fn fmt(&self, _f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
111 Ok(())
112 }
113 }
114
/// Shared state for one elaboration run over a single MIR body.
struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    mir: &'a Mir<'tcx>,
    env: &'a MoveDataParamEnv<'tcx>,
    // Per-block entry sets of the two initialization analyses.
    flow_inits: DataflowResults<MaybeInitializedLvals<'a, 'tcx>>,
    flow_uninits: DataflowResults<MaybeUninitializedLvals<'a, 'tcx>>,
    // Maps each conditionally-dropped move path to its boolean flag temp.
    drop_flags: FnvHashMap<MovePathIndex, Temp>,
    // Accumulates all edits; applied to the MIR once elaboration is done.
    patch: MirPatch<'tcx>,
}
124
/// Everything needed to elaborate one drop of one lvalue: where it
/// happens, what is dropped, and where control flow goes afterwards.
#[derive(Copy, Clone, Debug)]
struct DropCtxt<'a, 'tcx: 'a> {
    source_info: SourceInfo,
    is_cleanup: bool,

    // Initialization state at the drop site; used to choose between
    // eliding the drop, an unconditional drop, a flag-guarded drop, or
    // a field-by-field "open" drop.
    init_data: &'a InitializationData,

    lvalue: &'a Lvalue<'tcx>,
    path: MovePathIndex,
    // Successor block on normal completion.
    succ: BasicBlock,
    // Cleanup target if the drop unwinds; `None` inside cleanup blocks.
    unwind: Option<BasicBlock>
}
137
138 impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
    /// Move data gathered for this function body.
    fn move_data(&self) -> &'b MoveData<'tcx> { &self.env.move_data }
    /// Parameter environment used for `type_needs_drop` queries.
    fn param_env(&self) -> &'b ty::ParameterEnvironment<'tcx> {
        &self.env.param_env
    }
143
    /// Reconstructs the initialization state just before `loc`: take the
    /// dataflow state on entry to `loc.block` and replay the effects of
    /// the statements preceding `loc.statement_index`.
    fn initialization_data_at(&self, loc: Location) -> InitializationData {
        let mut data = InitializationData {
            live: self.flow_inits.sets().on_entry_set_for(loc.block.index())
                .to_owned(),
            dead: self.flow_uninits.sets().on_entry_set_for(loc.block.index())
                .to_owned(),
        };
        for stmt in 0..loc.statement_index {
            data.apply_location(self.tcx, self.mir, self.env,
                                Location { block: loc.block, statement_index: stmt });
        }
        data
    }
157
    /// Allocates a fresh boolean temporary as the drop flag for `index`,
    /// unless one already exists (idempotent).
    fn create_drop_flag(&mut self, index: MovePathIndex) {
        let tcx = self.tcx;
        // Borrow the patch separately so the closure need not capture `self`.
        let patch = &mut self.patch;
        self.drop_flags.entry(index).or_insert_with(|| {
            patch.new_temp(tcx.types.bool)
        });
    }
165
166 fn drop_flag(&mut self, index: MovePathIndex) -> Option<Lvalue<'tcx>> {
167 self.drop_flags.get(&index).map(|t| Lvalue::Temp(*t))
168 }
169
    /// create a patch that elaborates all drops in the input
    /// MIR.
    fn elaborate(mut self) -> MirPatch<'tcx>
    {
        // Ordering matters: flags must exist before drops are
        // elaborated, and the flag-maintenance passes run afterwards
        // (elaboration-created blocks maintain their own flags; see
        // `drop_flags_for_locs`).
        self.collect_drop_flags();

        self.elaborate_drops();

        self.drop_flags_on_init();
        self.drop_flags_for_fn_rets();
        self.drop_flags_for_args();
        self.drop_flags_for_locs();

        self.patch
    }
185
186 fn path_needs_drop(&self, path: MovePathIndex) -> bool
187 {
188 let lvalue = &self.move_data().move_paths[path].lvalue;
189 let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
190 debug!("path_needs_drop({:?}, {:?} : {:?})", path, lvalue, ty);
191
192 self.tcx.type_needs_drop_given_env(ty, self.param_env())
193 }
194
    /// Walks every `Drop`/`DropAndReplace` terminator and creates a drop
    /// flag for each child path that needs drop and is *both* maybe-live
    /// and maybe-dead at the drop site — i.e. needs a runtime check.
    fn collect_drop_flags(&mut self)
    {
        for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
            let terminator = data.terminator();
            let location = match terminator.kind {
                TerminatorKind::Drop { ref location, .. } |
                TerminatorKind::DropAndReplace { ref location, .. } => location,
                _ => continue
            };

            // State just before the terminator (index == statements.len()).
            let init_data = self.initialization_data_at(Location {
                block: bb,
                statement_index: data.statements.len()
            });

            let path = self.move_data().rev_lookup.find(location);
            debug!("collect_drop_flags: {:?}, lv {:?} ({:?})",
                   bb, location, path);

            let path = match path {
                LookupResult::Exact(e) => e,
                LookupResult::Parent(None) => continue,
                LookupResult::Parent(Some(parent)) => {
                    // Untracked drop under a tracked parent: only sound
                    // if the parent is surely initialized here.
                    let (_maybe_live, maybe_dead) = init_data.state(parent);
                    if maybe_dead {
                        span_bug!(terminator.source_info.span,
                                  "drop of untracked, uninitialized value {:?}, lv {:?} ({:?})",
                                  bb, location, path);
                    }
                    continue
                }
            };

            on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
                if self.path_needs_drop(child) {
                    let (maybe_live, maybe_dead) = init_data.state(child);
                    debug!("collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
                           child, location, path, (maybe_live, maybe_dead));
                    // Only a genuinely unknown state needs a flag.
                    if maybe_live && maybe_dead {
                        self.create_drop_flag(child)
                    }
                }
            });
        }
    }
240
241 fn elaborate_drops(&mut self)
242 {
243 for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
244 let loc = Location { block: bb, statement_index: data.statements.len() };
245 let terminator = data.terminator();
246
247 let resume_block = self.patch.resume_block();
248 match terminator.kind {
249 TerminatorKind::Drop { ref location, target, unwind } => {
250 let init_data = self.initialization_data_at(loc);
251 match self.move_data().rev_lookup.find(location) {
252 LookupResult::Exact(path) => {
253 self.elaborate_drop(&DropCtxt {
254 source_info: terminator.source_info,
255 is_cleanup: data.is_cleanup,
256 init_data: &init_data,
257 lvalue: location,
258 path: path,
259 succ: target,
260 unwind: if data.is_cleanup {
261 None
262 } else {
263 Some(Option::unwrap_or(unwind, resume_block))
264 }
265 }, bb);
266 }
267 LookupResult::Parent(..) => {
268 span_bug!(terminator.source_info.span,
269 "drop of untracked value {:?}", bb);
270 }
271 }
272 }
273 TerminatorKind::DropAndReplace { ref location, ref value,
274 target, unwind } =>
275 {
276 assert!(!data.is_cleanup);
277
278 self.elaborate_replace(
279 loc,
280 location, value,
281 target, unwind
282 );
283 }
284 _ => continue
285 }
286 }
287 }
288
    /// Elaborate a MIR `replace` terminator. This instruction
    /// is not directly handled by translation, and therefore
    /// must be desugared.
    ///
    /// The desugaring drops the location if needed, and then writes
    /// the value (including setting the drop flag) over it in *both* arms.
    ///
    /// The `replace` terminator can also be called on lvalues that
    /// are not tracked by elaboration (for example,
    /// `replace x[i] <- tmp0`). The borrow checker requires that
    /// these locations are initialized before the assignment,
    /// so we just generate an unconditional drop.
    fn elaborate_replace(
        &mut self,
        loc: Location,
        location: &Lvalue<'tcx>,
        value: &Operand<'tcx>,
        target: BasicBlock,
        unwind: Option<BasicBlock>)
    {
        let bb = loc.block;
        let data = &self.mir[bb];
        let terminator = data.terminator();

        // The write of `value` into `location`, emitted on both the
        // normal and the unwind paths.
        let assign = Statement {
            kind: StatementKind::Assign(location.clone(), Rvalue::Use(value.clone())),
            source_info: terminator.source_info
        };

        // Unwind arm: perform the assignment, then continue unwinding.
        let unwind = unwind.unwrap_or(self.patch.resume_block());
        let unwind = self.patch.new_block(BasicBlockData {
            statements: vec![assign.clone()],
            terminator: Some(Terminator {
                kind: TerminatorKind::Goto { target: unwind },
                ..*terminator
            }),
            is_cleanup: true
        });

        // Normal arm: perform the assignment, then continue to `target`.
        let target = self.patch.new_block(BasicBlockData {
            statements: vec![assign],
            terminator: Some(Terminator {
                kind: TerminatorKind::Goto { target: target },
                ..*terminator
            }),
            is_cleanup: data.is_cleanup,
        });

        match self.move_data().rev_lookup.find(location) {
            LookupResult::Exact(path) => {
                debug!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator, path);
                let init_data = self.initialization_data_at(loc);

                self.elaborate_drop(&DropCtxt {
                    source_info: terminator.source_info,
                    is_cleanup: data.is_cleanup,
                    init_data: &init_data,
                    lvalue: location,
                    path: path,
                    succ: target,
                    unwind: Some(unwind)
                }, bb);
                // After the replacement, both arms leave the location
                // (and all its children) initialized again.
                on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
                    self.set_drop_flag(Location { block: target, statement_index: 0 },
                                       child, DropFlagState::Present);
                    self.set_drop_flag(Location { block: unwind, statement_index: 0 },
                                       child, DropFlagState::Present);
                });
            }
            LookupResult::Parent(parent) => {
                // drop and replace behind a pointer/array/whatever. The location
                // must be initialized.
                debug!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator, parent);
                self.patch.patch_terminator(bb, TerminatorKind::Drop {
                    location: location.clone(),
                    target: target,
                    unwind: Some(unwind)
                });
            }
        }
    }
370
    /// This elaborates a single drop instruction, located at `bb`, and
    /// patches over it.
    ///
    /// The elaborated drop checks the drop flags to only drop what
    /// is initialized.
    ///
    /// In addition, the relevant drop flags also need to be cleared
    /// to avoid double-drops. However, in the middle of a complex
    /// drop, one must avoid clearing some of the flags before they
    /// are read, as that would cause a memory leak.
    ///
    /// In particular, when dropping an ADT, multiple fields may be
    /// joined together under the `rest` subpath. They are all controlled
    /// by the primary drop flag, but only the last rest-field dropped
    /// should clear it (and it must also not clear anything else).
    ///
    /// FIXME: I think we should just control the flags externally
    /// and then we do not need this machinery.
    fn elaborate_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, bb: BasicBlock) {
        debug!("elaborate_drop({:?})", c);

        // Aggregate the dataflow state over all droppable child paths.
        let mut some_live = false;
        let mut some_dead = false;
        let mut children_count = 0;
        on_all_children_bits(
            self.tcx, self.mir, self.move_data(),
            c.path, |child| {
                if self.path_needs_drop(child) {
                    let (live, dead) = c.init_data.state(child);
                    debug!("elaborate_drop: state({:?}) = {:?}",
                           child, (live, dead));
                    some_live |= live;
                    some_dead |= dead;
                    children_count += 1;
                }
            });

        debug!("elaborate_drop({:?}): live - {:?}", c,
               (some_live, some_dead));
        match (some_live, some_dead) {
            (false, false) | (false, true) => {
                // dead drop - patch it out
                self.patch.patch_terminator(bb, TerminatorKind::Goto {
                    target: c.succ
                });
            }
            (true, false) => {
                // static drop - just set the flag
                self.patch.patch_terminator(bb, TerminatorKind::Drop {
                    location: c.lvalue.clone(),
                    target: c.succ,
                    unwind: c.unwind
                });
                self.drop_flags_for_drop(c, bb);
            }
            (true, true) => {
                // dynamic drop
                // With a single child, a flag test around a whole drop
                // suffices; otherwise, open the drop field by field.
                let drop_bb = if children_count == 1 || self.must_complete_drop(c) {
                    self.conditional_drop(c)
                } else {
                    self.open_drop(c)
                };
                self.patch.patch_terminator(bb, TerminatorKind::Goto {
                    target: drop_bb
                });
            }
        }
    }
439
    /// Return the lvalue and move path for each field of `variant`,
    /// (the move path is `None` if the field is a rest field).
    fn move_paths_for_fields(&self,
                             base_lv: &Lvalue<'tcx>,
                             variant_path: MovePathIndex,
                             variant: ty::VariantDef<'tcx>,
                             substs: &'tcx Substs<'tcx>)
                             -> Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>
    {
        variant.fields.iter().enumerate().map(|(i, f)| {
            // Find the child move path projecting field `i`, if tracked.
            let subpath =
                super::move_path_children_matching(self.move_data(), variant_path, |p| {
                    match p {
                        &Projection {
                            elem: ProjectionElem::Field(idx, _), ..
                        } => idx.index() == i,
                        _ => false
                    }
                });

            // Normalize associated types so the field type is concrete.
            let field_ty =
                self.tcx.normalize_associated_type_in_env(
                    &f.ty(self.tcx, substs),
                    self.param_env()
                );
            (base_lv.clone().field(Field::new(i), field_ty), subpath)
        }).collect()
    }
468
    /// Create one-half of the drop ladder for a list of fields, and return
    /// the list of steps in it in reverse order.
    ///
    /// `unwind_ladder` is such a list of steps in reverse order,
    /// which is called instead of the next step if the drop unwinds
    /// (the first field is never reached). If it is `None`, all
    /// unwind targets are left blank.
    fn drop_halfladder<'a>(&mut self,
                           c: &DropCtxt<'a, 'tcx>,
                           unwind_ladder: Option<Vec<BasicBlock>>,
                           succ: BasicBlock,
                           fields: &[(Lvalue<'tcx>, Option<MovePathIndex>)],
                           is_cleanup: bool)
                           -> Vec<BasicBlock>
    {
        let mut succ = succ;
        // Inside the cleanup half there is no further unwind target.
        let mut unwind_succ = if is_cleanup {
            None
        } else {
            c.unwind
        };
        // Only the first rest-field block emitted may clear the shared
        // primary flag; see the flag-clobber note below.
        let mut update_drop_flag = true;

        // Fields are processed in reverse, so each new block becomes the
        // successor of the next (earlier) field's drop.
        fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
            let drop_block = match path {
                Some(path) => {
                    debug!("drop_ladder: for std field {} ({:?})", i, lv);

                    // Field with its own move path: fully elaborate.
                    self.elaborated_drop_block(&DropCtxt {
                        source_info: c.source_info,
                        is_cleanup: is_cleanup,
                        init_data: c.init_data,
                        lvalue: lv,
                        path: path,
                        succ: succ,
                        unwind: unwind_succ,
                    })
                }
                None => {
                    debug!("drop_ladder: for rest field {} ({:?})", i, lv);

                    // Rest field: guarded by the parent path's flag.
                    let blk = self.complete_drop(&DropCtxt {
                        source_info: c.source_info,
                        is_cleanup: is_cleanup,
                        init_data: c.init_data,
                        lvalue: lv,
                        path: c.path,
                        succ: succ,
                        unwind: unwind_succ,
                    }, update_drop_flag);

                    // the drop flag has been updated - updating
                    // it again would clobber it.
                    update_drop_flag = false;

                    blk
                }
            };

            succ = drop_block;
            unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);

            drop_block
        }).collect()
    }
534
    /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
    ///
    /// For example, with 3 fields, the drop ladder is
    ///
    /// .d0:
    ///     ELAB(drop location.0 [target=.d1, unwind=.c1])
    /// .d1:
    ///     ELAB(drop location.1 [target=.d2, unwind=.c2])
    /// .d2:
    ///     ELAB(drop location.2 [target=`c.succ`, unwind=`c.unwind`])
    /// .c1:
    ///     ELAB(drop location.1 [target=.c2])
    /// .c2:
    ///     ELAB(drop location.2 [target=`c.unwind])
    fn drop_ladder<'a>(&mut self,
                       c: &DropCtxt<'a, 'tcx>,
                       fields: Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>)
                       -> BasicBlock
    {
        debug!("drop_ladder({:?}, {:?})", c, fields);

        // Fields whose type needs no drop contribute no ladder step.
        let mut fields = fields;
        fields.retain(|&(ref lvalue, _)| {
            let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
            self.tcx.type_needs_drop_given_env(ty, self.param_env())
        });

        debug!("drop_ladder - fields needing drop: {:?}", fields);

        // Build the cleanup half first so the normal half can point
        // each step's unwind edge into it.
        let unwind_ladder = if c.is_cleanup {
            None
        } else {
            Some(self.drop_halfladder(c, None, c.unwind.unwrap(), &fields, true))
        };

        // The last block built (first field's drop) is the entry point;
        // with no droppable fields, fall through to `c.succ` directly.
        self.drop_halfladder(c, unwind_ladder, c.succ, &fields, c.is_cleanup)
            .last().cloned().unwrap_or(c.succ)
    }
573
    /// Builds a drop ladder over the element types of a tuple (also used
    /// for closure upvars; see `open_drop`).
    fn open_drop_for_tuple<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, tys: &[Ty<'tcx>])
                               -> BasicBlock
    {
        debug!("open_drop_for_tuple({:?}, {:?})", c, tys);

        let fields = tys.iter().enumerate().map(|(i, &ty)| {
            // Pair each element's lvalue with its move path (if tracked).
            (c.lvalue.clone().field(Field::new(i), ty),
             super::move_path_children_matching(
                 self.move_data(), c.path, |proj| match proj {
                     &Projection {
                         elem: ProjectionElem::Field(f, _), ..
                     } => f.index() == i,
                     _ => false
                 }
             ))
        }).collect();

        self.drop_ladder(c, fields)
    }
593
    /// Drops a `Box<T>`: elaborates the drop of the boxed contents, then
    /// frees the allocation via the `box_free` lang item on both the
    /// success and the unwind paths.
    fn open_drop_for_box<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, ty: Ty<'tcx>)
                             -> BasicBlock
    {
        debug!("open_drop_for_box({:?}, {:?})", c, ty);

        // The boxed contents are tracked via the `Deref` child path.
        let interior_path = super::move_path_children_matching(
            self.move_data(), c.path, |proj| match proj {
                &Projection { elem: ProjectionElem::Deref, .. } => true,
                _ => false
            }).unwrap();

        let interior = c.lvalue.clone().deref();
        let inner_c = DropCtxt {
            lvalue: &interior,
            // Both exits of the interior drop route through a free block.
            unwind: c.unwind.map(|u| {
                self.box_free_block(c, ty, u, true)
            }),
            succ: self.box_free_block(c, ty, c.succ, c.is_cleanup),
            path: interior_path,
            ..*c
        };

        self.elaborated_drop_block(&inner_c)
    }
618
619 fn open_drop_for_variant<'a>(&mut self,
620 c: &DropCtxt<'a, 'tcx>,
621 drop_block: &mut Option<BasicBlock>,
622 adt: ty::AdtDef<'tcx>,
623 substs: &'tcx Substs<'tcx>,
624 variant_index: usize)
625 -> BasicBlock
626 {
627 let subpath = super::move_path_children_matching(
628 self.move_data(), c.path, |proj| match proj {
629 &Projection {
630 elem: ProjectionElem::Downcast(_, idx), ..
631 } => idx == variant_index,
632 _ => false
633 });
634
635 if let Some(variant_path) = subpath {
636 let base_lv = c.lvalue.clone().elem(
637 ProjectionElem::Downcast(adt, variant_index)
638 );
639 let fields = self.move_paths_for_fields(
640 &base_lv,
641 variant_path,
642 &adt.variants[variant_index],
643 substs);
644 self.drop_ladder(c, fields)
645 } else {
646 // variant not found - drop the entire enum
647 if let None = *drop_block {
648 *drop_block = Some(self.complete_drop(c, true));
649 }
650 return drop_block.unwrap();
651 }
652 }
653
    /// Builds the "open" drop for an ADT: a single drop ladder for a
    /// one-variant type, or a discriminant switch over per-variant
    /// ladders for a multi-variant enum.
    fn open_drop_for_adt<'a>(&mut self, c: &DropCtxt<'a, 'tcx>,
                             adt: ty::AdtDef<'tcx>, substs: &'tcx Substs<'tcx>)
                             -> BasicBlock {
        debug!("open_drop_for_adt({:?}, {:?}, {:?})", c, adt, substs);

        // Cached whole-enum fallback, shared across variants (see
        // `open_drop_for_variant`).
        let mut drop_block = None;

        match adt.variants.len() {
            1 => {
                let fields = self.move_paths_for_fields(
                    c.lvalue,
                    c.path,
                    &adt.variants[0],
                    substs
                );
                self.drop_ladder(c, fields)
            }
            _ => {
                let variant_drops : Vec<BasicBlock> =
                    (0..adt.variants.len()).map(|i| {
                        self.open_drop_for_variant(c, &mut drop_block,
                                                   adt, substs, i)
                    }).collect();

                // If there are multiple variants, then if something
                // is present within the enum the discriminant, tracked
                // by the rest path, must be initialized.
                //
                // Additionally, we do not want to switch on the
                // discriminant after it is free-ed, because that
                // way lies only trouble.

                let switch_block = self.new_block(
                    c, c.is_cleanup, TerminatorKind::Switch {
                        discr: c.lvalue.clone(),
                        adt_def: adt,
                        targets: variant_drops
                    });

                self.drop_flag_test_block(c, switch_block)
            }
        }
    }
697
    /// The slow-path - create an "open", elaborated drop for a type
    /// which is moved-out-of only partially, and patch `bb` to a jump
    /// to it. This must not be called on ADTs with a destructor,
    /// as these can't be moved-out-of, except for `Box<T>`, which is
    /// special-cased.
    ///
    /// This creates a "drop ladder" that drops the needed fields of the
    /// ADT, both in the success case or if one of the destructors fail.
    fn open_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
        let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
        // Dispatch on the shape of the dropped type.
        match ty.sty {
            ty::TyAdt(def, substs) => {
                self.open_drop_for_adt(c, def, substs)
            }
            // Closures drop their upvars like a tuple of those types.
            ty::TyTuple(tys) | ty::TyClosure(_, ty::ClosureSubsts {
                upvar_tys: tys, ..
            }) => {
                self.open_drop_for_tuple(c, tys)
            }
            ty::TyBox(ty) => {
                self.open_drop_for_box(c, ty)
            }
            _ => bug!("open drop from non-ADT `{:?}`", ty)
        }
    }
723
    /// Return a basic block that drop an lvalue using the context
    /// and path in `c`. If `update_drop_flag` is true, also
    /// clear `c`.
    ///
    /// if FLAG(c.path)
    ///     if(update_drop_flag) FLAG(c.path) = false
    ///     drop(c.lv)
    fn complete_drop<'a>(
        &mut self,
        c: &DropCtxt<'a, 'tcx>,
        update_drop_flag: bool)
        -> BasicBlock
    {
        debug!("complete_drop({:?},{:?})", c, update_drop_flag);

        let drop_block = self.drop_block(c);
        if update_drop_flag {
            // Clear the flag as the first statement of the drop block,
            // i.e. before the drop terminator runs.
            self.set_drop_flag(
                Location { block: drop_block, statement_index: 0 },
                c.path,
                DropFlagState::Absent
            );
        }

        // Guard everything behind a test of the flag.
        self.drop_flag_test_block(c, drop_block)
    }
750
    /// Create a simple conditional drop.
    ///
    /// if FLAG(c.lv)
    ///     FLAGS(c.lv) = false
    ///     drop(c.lv)
    fn conditional_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>)
                            -> BasicBlock
    {
        debug!("conditional_drop({:?})", c);
        let drop_bb = self.drop_block(c);
        // Clear the flags of `c.path` and all children at the drop.
        self.drop_flags_for_drop(c, drop_bb);

        self.drop_flag_test_block(c, drop_bb)
    }
765
766 fn new_block<'a>(&mut self,
767 c: &DropCtxt<'a, 'tcx>,
768 is_cleanup: bool,
769 k: TerminatorKind<'tcx>)
770 -> BasicBlock
771 {
772 self.patch.new_block(BasicBlockData {
773 statements: vec![],
774 terminator: Some(Terminator {
775 source_info: c.source_info, kind: k
776 }),
777 is_cleanup: is_cleanup
778 })
779 }
780
    /// Creates a fresh drop block for `c` and recursively elaborates its
    /// drop terminator.
    fn elaborated_drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
        debug!("elaborated_drop_block({:?})", c);
        let blk = self.drop_block(c);
        self.elaborate_drop(c, blk);
        blk
    }
787
    /// Tests `c.path`'s drop flag, branching to `on_set` when it may be
    /// set and to `c.succ` otherwise.
    fn drop_flag_test_block<'a>(&mut self,
                                c: &DropCtxt<'a, 'tcx>,
                                on_set: BasicBlock)
                                -> BasicBlock {
        self.drop_flag_test_block_with_succ(c, c.is_cleanup, on_set, c.succ)
    }
794
    /// Builds a block testing the drop flag of `c.path`, branching to
    /// `on_set`/`on_unset`. When dataflow already proves the state
    /// statically, no test block is emitted at all.
    fn drop_flag_test_block_with_succ<'a>(&mut self,
                                          c: &DropCtxt<'a, 'tcx>,
                                          is_cleanup: bool,
                                          on_set: BasicBlock,
                                          on_unset: BasicBlock)
                                          -> BasicBlock
    {
        let (maybe_live, maybe_dead) = c.init_data.state(c.path);
        debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
               c, is_cleanup, on_set, (maybe_live, maybe_dead));

        match (maybe_live, maybe_dead) {
            // Surely uninitialized: skip straight to `on_unset`.
            (false, _) => on_unset,
            // Surely initialized: no runtime test needed.
            (true, false) => on_set,
            // Genuinely unknown: a drop flag must exist for this path
            // (`collect_drop_flags` creates one exactly in this case).
            (true, true) => {
                let flag = self.drop_flag(c.path).unwrap();
                self.new_block(c, is_cleanup, TerminatorKind::If {
                    cond: Operand::Consume(flag),
                    targets: (on_set, on_unset)
                })
            }
        }
    }
818
819 fn drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
820 self.new_block(c, c.is_cleanup, TerminatorKind::Drop {
821 location: c.lvalue.clone(),
822 target: c.succ,
823 unwind: c.unwind
824 })
825 }
826
    /// Returns a block that frees the box's allocation, guarded by a
    /// test of `c.path`'s drop flag; falls through to `target` when the
    /// box may not be initialized.
    fn box_free_block<'a>(
        &mut self,
        c: &DropCtxt<'a, 'tcx>,
        ty: Ty<'tcx>,
        target: BasicBlock,
        is_cleanup: bool
    ) -> BasicBlock {
        let block = self.unelaborated_free_block(c, ty, target, is_cleanup);
        self.drop_flag_test_block_with_succ(c, is_cleanup, block, target)
    }
837
    /// Builds the block that calls the `box_free` lang item on
    /// `c.lvalue`, first clearing the box's drop flag (if it has one) so
    /// the free cannot be repeated.
    fn unelaborated_free_block<'a>(
        &mut self,
        c: &DropCtxt<'a, 'tcx>,
        ty: Ty<'tcx>,
        target: BasicBlock,
        is_cleanup: bool
    ) -> BasicBlock {
        let mut statements = vec![];
        if let Some(&flag) = self.drop_flags.get(&c.path) {
            // Mark the box dead before freeing it.
            statements.push(Statement {
                source_info: c.source_info,
                kind: StatementKind::Assign(
                    Lvalue::Temp(flag),
                    self.constant_bool(c.source_info.span, false)
                )
            });
        }

        let tcx = self.tcx;
        // `box_free`'s unit return value is discarded into a fresh temp.
        let unit_temp = Lvalue::Temp(self.patch.new_temp(tcx.mk_nil()));
        let free_func = tcx.lang_items.require(lang_items::BoxFreeFnLangItem)
            .unwrap_or_else(|e| tcx.sess.fatal(&e));
        // Instantiate `box_free::<ty>`.
        let substs = Substs::new(tcx, iter::once(Kind::from(ty)));
        let fty = tcx.lookup_item_type(free_func).ty.subst(tcx, substs);

        self.patch.new_block(BasicBlockData {
            statements: statements,
            terminator: Some(Terminator {
                source_info: c.source_info, kind: TerminatorKind::Call {
                    func: Operand::Constant(Constant {
                        span: c.source_info.span,
                        ty: fty,
                        literal: Literal::Item {
                            def_id: free_func,
                            substs: substs
                        }
                    }),
                    args: vec![Operand::Consume(c.lvalue.clone())],
                    destination: Some((unit_temp, target)),
                    cleanup: None
                }
            }),
            is_cleanup: is_cleanup
        })
    }
883
884 fn must_complete_drop<'a>(&self, c: &DropCtxt<'a, 'tcx>) -> bool {
885 // if we have a destuctor, we must *not* split the drop.
886
887 // dataflow can create unneeded children in some cases
888 // - be sure to ignore them.
889
890 let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
891
892 match ty.sty {
893 ty::TyAdt(def, _) => {
894 if def.has_dtor() {
895 self.tcx.sess.span_warn(
896 c.source_info.span,
897 &format!("dataflow bug??? moving out of type with dtor {:?}",
898 c));
899 true
900 } else {
901 false
902 }
903 }
904 _ => false
905 }
906 }
907
908 fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
909 Rvalue::Use(Operand::Constant(Constant {
910 span: span,
911 ty: self.tcx.types.bool,
912 literal: Literal::Value { value: ConstVal::Bool(val) }
913 }))
914 }
915
    /// Emits `flag = <val>` at `loc` if `path` has a drop flag; a no-op
    /// for paths that never got one.
    fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
        if let Some(&flag) = self.drop_flags.get(&path) {
            let span = self.patch.source_info_for_location(self.mir, loc).span;
            let val = self.constant_bool(span, val.value());
            self.patch.add_assign(loc, Lvalue::Temp(flag), val);
        }
    }
923
    /// Initializes every drop flag to `false` at function entry.
    fn drop_flags_on_init(&mut self) {
        let loc = Location { block: START_BLOCK, statement_index: 0 };
        let span = self.patch.source_info_for_location(self.mir, loc).span;
        let false_ = self.constant_bool(span, false);
        for flag in self.drop_flags.values() {
            self.patch.add_assign(loc, Lvalue::Temp(*flag), false_.clone());
        }
    }
932
    /// For calls that have a cleanup edge, the destination is only
    /// initialized on the success edge, so its flags are set at the
    /// start of the success target rather than after the call itself.
    fn drop_flags_for_fn_rets(&mut self) {
        for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
            if let TerminatorKind::Call {
                destination: Some((ref lv, tgt)), cleanup: Some(_), ..
            } = data.terminator().kind {
                assert!(!self.patch.is_patched(bb));

                let loc = Location { block: tgt, statement_index: 0 };
                let path = self.move_data().rev_lookup.find(lv);
                on_lookup_result_bits(
                    self.tcx, self.mir, self.move_data(), path,
                    |child| self.set_drop_flag(loc, child, DropFlagState::Present)
                );
            }
        }
    }
949
    /// Sets the drop flags for function arguments, which are initialized
    /// on entry.
    fn drop_flags_for_args(&mut self) {
        let loc = Location { block: START_BLOCK, statement_index: 0 };
        super::drop_flag_effects_for_function_entry(
            self.tcx, self.mir, self.env, |path, ds| {
                self.set_drop_flag(loc, path, ds);
            }
        )
    }
958
    /// Emits drop-flag updates for every move/initialization in the
    /// original (pre-elaboration) basic blocks.
    fn drop_flags_for_locs(&mut self) {
        // We intentionally iterate only over the *old* basic blocks.
        //
        // Basic blocks created by drop elaboration update their
        // drop flags by themselves, to avoid the drop flags being
        // clobbered before they are read.

        for (bb, data) in self.mir.basic_blocks().iter_enumerated() {
            debug!("drop_flags_for_locs({:?})", data);
            // Index `statements.len()` denotes the terminator.
            for i in 0..(data.statements.len()+1) {
                debug!("drop_flag_for_locs: stmt {}", i);
                let mut allow_initializations = true;
                if i == data.statements.len() {
                    match data.terminator().kind {
                        TerminatorKind::Drop { .. } => {
                            // drop elaboration should handle that by itself
                            continue
                        }
                        TerminatorKind::DropAndReplace { .. } => {
                            // this contains the move of the source and
                            // the initialization of the destination. We
                            // only want the former - the latter is handled
                            // by the elaboration code and must be done
                            // *after* the destination is dropped.
                            assert!(self.patch.is_patched(bb));
                            allow_initializations = false;
                        }
                        _ => {
                            assert!(!self.patch.is_patched(bb));
                        }
                    }
                }
                let loc = Location { block: bb, statement_index: i };
                super::drop_flag_effects_for_location(
                    self.tcx, self.mir, self.env, loc, |path, ds| {
                        if ds == DropFlagState::Absent || allow_initializations {
                            self.set_drop_flag(loc, path, ds)
                        }
                    }
                )
            }

            // There may be a critical edge after this call,
            // so mark the return as initialized *before* the
            // call.
            if let TerminatorKind::Call {
                destination: Some((ref lv, _)), cleanup: None, ..
            } = data.terminator().kind {
                assert!(!self.patch.is_patched(bb));

                let loc = Location { block: bb, statement_index: data.statements.len() };
                let path = self.move_data().rev_lookup.find(lv);
                on_lookup_result_bits(
                    self.tcx, self.mir, self.move_data(), path,
                    |child| self.set_drop_flag(loc, child, DropFlagState::Present)
                );
            }
        }
    }
1018
    /// Clears the drop flags of `c.path` and all of its children at the
    /// drop terminator of `bb` (i.e. right before the drop executes).
    fn drop_flags_for_drop<'a>(&mut self,
                               c: &DropCtxt<'a, 'tcx>,
                               bb: BasicBlock)
    {
        let loc = self.patch.terminator_loc(self.mir, bb);
        on_all_children_bits(
            self.tcx, self.mir, self.move_data(), c.path,
            |child| self.set_drop_flag(loc, child, DropFlagState::Absent)
        );
    }
1029 }