1 // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 use indexed_set
::IdxSetBuf
;
12 use super::gather_moves
::{MoveData, MovePathIndex, LookupResult}
;
13 use super::dataflow
::{MaybeInitializedLvals, MaybeUninitializedLvals}
;
14 use super::dataflow
::{DataflowResults}
;
15 use super::{drop_flag_effects_for_location, on_all_children_bits}
;
16 use super::on_lookup_result_bits
;
17 use super::{DropFlagState, MoveDataParamEnv}
;
18 use super::patch
::MirPatch
;
19 use rustc
::ty
::{self, Ty, TyCtxt}
;
20 use rustc
::ty
::subst
::{Kind, Subst, Substs}
;
21 use rustc
::mir
::repr
::*;
22 use rustc
::mir
::transform
::{Pass, MirPass, MirSource}
;
23 use rustc
::middle
::const_val
::ConstVal
;
24 use rustc
::middle
::lang_items
;
25 use rustc
::util
::nodemap
::FnvHashMap
;
26 use rustc_data_structures
::indexed_vec
::Idx
;
/// MIR pass that performs drop elaboration: `Drop` and `DropAndReplace`
/// terminators are rewritten into control flow guarded by per-path drop
/// flags, so values are dropped exactly once along every path.
pub struct ElaborateDrops;
35 impl<'tcx
> MirPass
<'tcx
> for ElaborateDrops
{
36 fn run_pass
<'a
>(&mut self, tcx
: TyCtxt
<'a
, 'tcx
, 'tcx
>,
37 src
: MirSource
, mir
: &mut Mir
<'tcx
>)
39 debug
!("elaborate_drops({:?} @ {:?})", src
, mir
.span
);
41 MirSource
::Fn(..) => {}
,
44 let id
= src
.item_id();
45 let param_env
= ty
::ParameterEnvironment
::for_item(tcx
, id
);
46 let move_data
= MoveData
::gather_moves(mir
, tcx
, ¶m_env
);
47 let elaborate_patch
= {
49 let env
= MoveDataParamEnv
{
54 super::do_dataflow(tcx
, mir
, id
, &[], &env
,
55 MaybeInitializedLvals
::new(tcx
, mir
));
57 super::do_dataflow(tcx
, mir
, id
, &[], &env
,
58 MaybeUninitializedLvals
::new(tcx
, mir
));
64 flow_inits
: flow_inits
,
65 flow_uninits
: flow_uninits
,
66 drop_flags
: FnvHashMap(),
67 patch
: MirPatch
::new(mir
),
70 elaborate_patch
.apply(mir
);
74 impl Pass
for ElaborateDrops {}
76 struct InitializationData
{
77 live
: IdxSetBuf
<MovePathIndex
>,
78 dead
: IdxSetBuf
<MovePathIndex
>
81 impl InitializationData
{
82 fn apply_location
<'a
,'tcx
>(&mut self,
83 tcx
: TyCtxt
<'a
, 'tcx
, 'tcx
>,
85 env
: &MoveDataParamEnv
<'tcx
>,
88 drop_flag_effects_for_location(tcx
, mir
, env
, loc
, |path
, df
| {
89 debug
!("at location {:?}: setting {:?} to {:?}",
92 DropFlagState
::Present
=> {
94 self.dead
.remove(&path
);
96 DropFlagState
::Absent
=> {
98 self.live
.remove(&path
);
104 fn state(&self, path
: MovePathIndex
) -> (bool
, bool
) {
105 (self.live
.contains(&path
), self.dead
.contains(&path
))
109 impl fmt
::Debug
for InitializationData
{
110 fn fmt(&self, _f
: &mut fmt
::Formatter
) -> Result
<(), fmt
::Error
> {
115 struct ElaborateDropsCtxt
<'a
, 'tcx
: 'a
> {
116 tcx
: TyCtxt
<'a
, 'tcx
, 'tcx
>,
118 env
: &'a MoveDataParamEnv
<'tcx
>,
119 flow_inits
: DataflowResults
<MaybeInitializedLvals
<'a
, 'tcx
>>,
120 flow_uninits
: DataflowResults
<MaybeUninitializedLvals
<'a
, 'tcx
>>,
121 drop_flags
: FnvHashMap
<MovePathIndex
, Temp
>,
122 patch
: MirPatch
<'tcx
>,
125 #[derive(Copy, Clone, Debug)]
126 struct DropCtxt
<'a
, 'tcx
: 'a
> {
127 source_info
: SourceInfo
,
130 init_data
: &'a InitializationData
,
132 lvalue
: &'a Lvalue
<'tcx
>,
135 unwind
: Option
<BasicBlock
>
138 impl<'b
, 'tcx
> ElaborateDropsCtxt
<'b
, 'tcx
> {
139 fn move_data(&self) -> &'b MoveData
<'tcx
> { &self.env.move_data }
140 fn param_env(&self) -> &'b ty
::ParameterEnvironment
<'tcx
> {
144 fn initialization_data_at(&self, loc
: Location
) -> InitializationData
{
145 let mut data
= InitializationData
{
146 live
: self.flow_inits
.sets().on_entry_set_for(loc
.block
.index())
148 dead
: self.flow_uninits
.sets().on_entry_set_for(loc
.block
.index())
151 for stmt
in 0..loc
.statement_index
{
152 data
.apply_location(self.tcx
, self.mir
, self.env
,
153 Location { block: loc.block, statement_index: stmt }
);
158 fn create_drop_flag(&mut self, index
: MovePathIndex
) {
160 let patch
= &mut self.patch
;
161 self.drop_flags
.entry(index
).or_insert_with(|| {
162 patch
.new_temp(tcx
.types
.bool
)
166 fn drop_flag(&mut self, index
: MovePathIndex
) -> Option
<Lvalue
<'tcx
>> {
167 self.drop_flags
.get(&index
).map(|t
| Lvalue
::Temp(*t
))
170 /// create a patch that elaborates all drops in the input
172 fn elaborate(mut self) -> MirPatch
<'tcx
>
174 self.collect_drop_flags();
176 self.elaborate_drops();
178 self.drop_flags_on_init();
179 self.drop_flags_for_fn_rets();
180 self.drop_flags_for_args();
181 self.drop_flags_for_locs();
186 fn path_needs_drop(&self, path
: MovePathIndex
) -> bool
188 let lvalue
= &self.move_data().move_paths
[path
].lvalue
;
189 let ty
= lvalue
.ty(self.mir
, self.tcx
).to_ty(self.tcx
);
190 debug
!("path_needs_drop({:?}, {:?} : {:?})", path
, lvalue
, ty
);
192 self.tcx
.type_needs_drop_given_env(ty
, self.param_env())
195 fn collect_drop_flags(&mut self)
197 for (bb
, data
) in self.mir
.basic_blocks().iter_enumerated() {
198 let terminator
= data
.terminator();
199 let location
= match terminator
.kind
{
200 TerminatorKind
::Drop { ref location, .. }
|
201 TerminatorKind
::DropAndReplace { ref location, .. }
=> location
,
205 let init_data
= self.initialization_data_at(Location
{
207 statement_index
: data
.statements
.len()
210 let path
= self.move_data().rev_lookup
.find(location
);
211 debug
!("collect_drop_flags: {:?}, lv {:?} ({:?})",
214 let path
= match path
{
215 LookupResult
::Exact(e
) => e
,
216 LookupResult
::Parent(None
) => continue,
217 LookupResult
::Parent(Some(parent
)) => {
218 let (_maybe_live
, maybe_dead
) = init_data
.state(parent
);
220 span_bug
!(terminator
.source_info
.span
,
221 "drop of untracked, uninitialized value {:?}, lv {:?} ({:?})",
228 on_all_children_bits(self.tcx
, self.mir
, self.move_data(), path
, |child
| {
229 if self.path_needs_drop(child
) {
230 let (maybe_live
, maybe_dead
) = init_data
.state(child
);
231 debug
!("collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
232 child
, location
, path
, (maybe_live
, maybe_dead
));
233 if maybe_live
&& maybe_dead
{
234 self.create_drop_flag(child
)
241 fn elaborate_drops(&mut self)
243 for (bb
, data
) in self.mir
.basic_blocks().iter_enumerated() {
244 let loc
= Location { block: bb, statement_index: data.statements.len() }
;
245 let terminator
= data
.terminator();
247 let resume_block
= self.patch
.resume_block();
248 match terminator
.kind
{
249 TerminatorKind
::Drop { ref location, target, unwind }
=> {
250 let init_data
= self.initialization_data_at(loc
);
251 match self.move_data().rev_lookup
.find(location
) {
252 LookupResult
::Exact(path
) => {
253 self.elaborate_drop(&DropCtxt
{
254 source_info
: terminator
.source_info
,
255 is_cleanup
: data
.is_cleanup
,
256 init_data
: &init_data
,
260 unwind
: if data
.is_cleanup
{
263 Some(Option
::unwrap_or(unwind
, resume_block
))
267 LookupResult
::Parent(..) => {
268 span_bug
!(terminator
.source_info
.span
,
269 "drop of untracked value {:?}", bb
);
273 TerminatorKind
::DropAndReplace
{ ref location
, ref value
,
276 assert
!(!data
.is_cleanup
);
278 self.elaborate_replace(
289 /// Elaborate a MIR `replace` terminator. This instruction
290 /// is not directly handled by translation, and therefore
291 /// must be desugared.
293 /// The desugaring drops the location if needed, and then writes
294 /// the value (including setting the drop flag) over it in *both* arms.
296 /// The `replace` terminator can also be called on lvalues that
297 /// are not tracked by elaboration (for example,
298 /// `replace x[i] <- tmp0`). The borrow checker requires that
299 /// these locations are initialized before the assignment,
300 /// so we just generate an unconditional drop.
301 fn elaborate_replace(
304 location
: &Lvalue
<'tcx
>,
305 value
: &Operand
<'tcx
>,
307 unwind
: Option
<BasicBlock
>)
310 let data
= &self.mir
[bb
];
311 let terminator
= data
.terminator();
313 let assign
= Statement
{
314 kind
: StatementKind
::Assign(location
.clone(), Rvalue
::Use(value
.clone())),
315 source_info
: terminator
.source_info
318 let unwind
= unwind
.unwrap_or(self.patch
.resume_block());
319 let unwind
= self.patch
.new_block(BasicBlockData
{
320 statements
: vec
![assign
.clone()],
321 terminator
: Some(Terminator
{
322 kind
: TerminatorKind
::Goto { target: unwind }
,
328 let target
= self.patch
.new_block(BasicBlockData
{
329 statements
: vec
![assign
],
330 terminator
: Some(Terminator
{
331 kind
: TerminatorKind
::Goto { target: target }
,
334 is_cleanup
: data
.is_cleanup
,
337 match self.move_data().rev_lookup
.find(location
) {
338 LookupResult
::Exact(path
) => {
339 debug
!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator
, path
);
340 let init_data
= self.initialization_data_at(loc
);
342 self.elaborate_drop(&DropCtxt
{
343 source_info
: terminator
.source_info
,
344 is_cleanup
: data
.is_cleanup
,
345 init_data
: &init_data
,
351 on_all_children_bits(self.tcx
, self.mir
, self.move_data(), path
, |child
| {
352 self.set_drop_flag(Location { block: target, statement_index: 0 }
,
353 child
, DropFlagState
::Present
);
354 self.set_drop_flag(Location { block: unwind, statement_index: 0 }
,
355 child
, DropFlagState
::Present
);
358 LookupResult
::Parent(parent
) => {
359 // drop and replace behind a pointer/array/whatever. The location
360 // must be initialized.
361 debug
!("elaborate_drop_and_replace({:?}) - untracked {:?}", terminator
, parent
);
362 self.patch
.patch_terminator(bb
, TerminatorKind
::Drop
{
363 location
: location
.clone(),
371 /// This elaborates a single drop instruction, located at `bb`, and
374 /// The elaborated drop checks the drop flags to only drop what
377 /// In addition, the relevant drop flags also need to be cleared
378 /// to avoid double-drops. However, in the middle of a complex
379 /// drop, one must avoid clearing some of the flags before they
380 /// are read, as that would cause a memory leak.
382 /// In particular, when dropping an ADT, multiple fields may be
383 /// joined together under the `rest` subpath. They are all controlled
384 /// by the primary drop flag, but only the last rest-field dropped
385 /// should clear it (and it must also not clear anything else).
387 /// FIXME: I think we should just control the flags externally
388 /// and then we do not need this machinery.
389 fn elaborate_drop
<'a
>(&mut self, c
: &DropCtxt
<'a
, 'tcx
>, bb
: BasicBlock
) {
390 debug
!("elaborate_drop({:?})", c
);
392 let mut some_live
= false;
393 let mut some_dead
= false;
394 let mut children_count
= 0;
395 on_all_children_bits(
396 self.tcx
, self.mir
, self.move_data(),
398 if self.path_needs_drop(child
) {
399 let (live
, dead
) = c
.init_data
.state(child
);
400 debug
!("elaborate_drop: state({:?}) = {:?}",
401 child
, (live
, dead
));
408 debug
!("elaborate_drop({:?}): live - {:?}", c
,
409 (some_live
, some_dead
));
410 match (some_live
, some_dead
) {
411 (false, false) | (false, true) => {
412 // dead drop - patch it out
413 self.patch
.patch_terminator(bb
, TerminatorKind
::Goto
{
418 // static drop - just set the flag
419 self.patch
.patch_terminator(bb
, TerminatorKind
::Drop
{
420 location
: c
.lvalue
.clone(),
424 self.drop_flags_for_drop(c
, bb
);
428 let drop_bb
= if children_count
== 1 || self.must_complete_drop(c
) {
429 self.conditional_drop(c
)
433 self.patch
.patch_terminator(bb
, TerminatorKind
::Goto
{
440 /// Return the lvalue and move path for each field of `variant`,
441 /// (the move path is `None` if the field is a rest field).
442 fn move_paths_for_fields(&self,
443 base_lv
: &Lvalue
<'tcx
>,
444 variant_path
: MovePathIndex
,
445 variant
: ty
::VariantDef
<'tcx
>,
446 substs
: &'tcx Substs
<'tcx
>)
447 -> Vec
<(Lvalue
<'tcx
>, Option
<MovePathIndex
>)>
449 variant
.fields
.iter().enumerate().map(|(i
, f
)| {
451 super::move_path_children_matching(self.move_data(), variant_path
, |p
| {
454 elem
: ProjectionElem
::Field(idx
, _
), ..
455 } => idx
.index() == i
,
461 self.tcx
.normalize_associated_type_in_env(
462 &f
.ty(self.tcx
, substs
),
465 (base_lv
.clone().field(Field
::new(i
), field_ty
), subpath
)
469 /// Create one-half of the drop ladder for a list of fields, and return
470 /// the list of steps in it in reverse order.
472 /// `unwind_ladder` is such a list of steps in reverse order,
473 /// which is called instead of the next step if the drop unwinds
474 /// (the first field is never reached). If it is `None`, all
475 /// unwind targets are left blank.
476 fn drop_halfladder
<'a
>(&mut self,
477 c
: &DropCtxt
<'a
, 'tcx
>,
478 unwind_ladder
: Option
<Vec
<BasicBlock
>>,
480 fields
: &[(Lvalue
<'tcx
>, Option
<MovePathIndex
>)],
485 let mut unwind_succ
= if is_cleanup
{
490 let mut update_drop_flag
= true;
492 fields
.iter().rev().enumerate().map(|(i
, &(ref lv
, path
))| {
493 let drop_block
= match path
{
495 debug
!("drop_ladder: for std field {} ({:?})", i
, lv
);
497 self.elaborated_drop_block(&DropCtxt
{
498 source_info
: c
.source_info
,
499 is_cleanup
: is_cleanup
,
500 init_data
: c
.init_data
,
508 debug
!("drop_ladder: for rest field {} ({:?})", i
, lv
);
510 let blk
= self.complete_drop(&DropCtxt
{
511 source_info
: c
.source_info
,
512 is_cleanup
: is_cleanup
,
513 init_data
: c
.init_data
,
518 }, update_drop_flag
);
520 // the drop flag has been updated - updating
521 // it again would clobber it.
522 update_drop_flag
= false;
529 unwind_succ
= unwind_ladder
.as_ref().map(|p
| p
[i
]);
535 /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
537 /// For example, with 3 fields, the drop ladder is
540 /// ELAB(drop location.0 [target=.d1, unwind=.c1])
542 /// ELAB(drop location.1 [target=.d2, unwind=.c2])
544 /// ELAB(drop location.2 [target=`c.succ`, unwind=`c.unwind`])
546 /// ELAB(drop location.1 [target=.c2])
548 /// ELAB(drop location.2 [target=`c.unwind])
549 fn drop_ladder
<'a
>(&mut self,
550 c
: &DropCtxt
<'a
, 'tcx
>,
551 fields
: Vec
<(Lvalue
<'tcx
>, Option
<MovePathIndex
>)>)
554 debug
!("drop_ladder({:?}, {:?})", c
, fields
);
556 let mut fields
= fields
;
557 fields
.retain(|&(ref lvalue
, _
)| {
558 let ty
= lvalue
.ty(self.mir
, self.tcx
).to_ty(self.tcx
);
559 self.tcx
.type_needs_drop_given_env(ty
, self.param_env())
562 debug
!("drop_ladder - fields needing drop: {:?}", fields
);
564 let unwind_ladder
= if c
.is_cleanup
{
567 Some(self.drop_halfladder(c
, None
, c
.unwind
.unwrap(), &fields
, true))
570 self.drop_halfladder(c
, unwind_ladder
, c
.succ
, &fields
, c
.is_cleanup
)
571 .last().cloned().unwrap_or(c
.succ
)
574 fn open_drop_for_tuple
<'a
>(&mut self, c
: &DropCtxt
<'a
, 'tcx
>, tys
: &[Ty
<'tcx
>])
577 debug
!("open_drop_for_tuple({:?}, {:?})", c
, tys
);
579 let fields
= tys
.iter().enumerate().map(|(i
, &ty
)| {
580 (c
.lvalue
.clone().field(Field
::new(i
), ty
),
581 super::move_path_children_matching(
582 self.move_data(), c
.path
, |proj
| match proj
{
584 elem
: ProjectionElem
::Field(f
, _
), ..
591 self.drop_ladder(c
, fields
)
594 fn open_drop_for_box
<'a
>(&mut self, c
: &DropCtxt
<'a
, 'tcx
>, ty
: Ty
<'tcx
>)
597 debug
!("open_drop_for_box({:?}, {:?})", c
, ty
);
599 let interior_path
= super::move_path_children_matching(
600 self.move_data(), c
.path
, |proj
| match proj
{
601 &Projection { elem: ProjectionElem::Deref, .. }
=> true,
605 let interior
= c
.lvalue
.clone().deref();
606 let inner_c
= DropCtxt
{
608 unwind
: c
.unwind
.map(|u
| {
609 self.box_free_block(c
, ty
, u
, true)
611 succ
: self.box_free_block(c
, ty
, c
.succ
, c
.is_cleanup
),
616 self.elaborated_drop_block(&inner_c
)
619 fn open_drop_for_variant
<'a
>(&mut self,
620 c
: &DropCtxt
<'a
, 'tcx
>,
621 drop_block
: &mut Option
<BasicBlock
>,
622 adt
: ty
::AdtDef
<'tcx
>,
623 substs
: &'tcx Substs
<'tcx
>,
624 variant_index
: usize)
627 let subpath
= super::move_path_children_matching(
628 self.move_data(), c
.path
, |proj
| match proj
{
630 elem
: ProjectionElem
::Downcast(_
, idx
), ..
631 } => idx
== variant_index
,
635 if let Some(variant_path
) = subpath
{
636 let base_lv
= c
.lvalue
.clone().elem(
637 ProjectionElem
::Downcast(adt
, variant_index
)
639 let fields
= self.move_paths_for_fields(
642 &adt
.variants
[variant_index
],
644 self.drop_ladder(c
, fields
)
646 // variant not found - drop the entire enum
647 if let None
= *drop_block
{
648 *drop_block
= Some(self.complete_drop(c
, true));
650 return drop_block
.unwrap();
654 fn open_drop_for_adt
<'a
>(&mut self, c
: &DropCtxt
<'a
, 'tcx
>,
655 adt
: ty
::AdtDef
<'tcx
>, substs
: &'tcx Substs
<'tcx
>)
657 debug
!("open_drop_for_adt({:?}, {:?}, {:?})", c
, adt
, substs
);
659 let mut drop_block
= None
;
661 match adt
.variants
.len() {
663 let fields
= self.move_paths_for_fields(
669 self.drop_ladder(c
, fields
)
672 let variant_drops
: Vec
<BasicBlock
> =
673 (0..adt
.variants
.len()).map(|i
| {
674 self.open_drop_for_variant(c
, &mut drop_block
,
678 // If there are multiple variants, then if something
679 // is present within the enum the discriminant, tracked
680 // by the rest path, must be initialized.
682 // Additionally, we do not want to switch on the
683 // discriminant after it is free-ed, because that
684 // way lies only trouble.
686 let switch_block
= self.new_block(
687 c
, c
.is_cleanup
, TerminatorKind
::Switch
{
688 discr
: c
.lvalue
.clone(),
690 targets
: variant_drops
693 self.drop_flag_test_block(c
, switch_block
)
698 /// The slow-path - create an "open", elaborated drop for a type
699 /// which is moved-out-of only partially, and patch `bb` to a jump
700 /// to it. This must not be called on ADTs with a destructor,
701 /// as these can't be moved-out-of, except for `Box<T>`, which is
704 /// This creates a "drop ladder" that drops the needed fields of the
705 /// ADT, both in the success case or if one of the destructors fail.
706 fn open_drop
<'a
>(&mut self, c
: &DropCtxt
<'a
, 'tcx
>) -> BasicBlock
{
707 let ty
= c
.lvalue
.ty(self.mir
, self.tcx
).to_ty(self.tcx
);
709 ty
::TyAdt(def
, substs
) => {
710 self.open_drop_for_adt(c
, def
, substs
)
712 ty
::TyTuple(tys
) | ty
::TyClosure(_
, ty
::ClosureSubsts
{
715 self.open_drop_for_tuple(c
, tys
)
718 self.open_drop_for_box(c
, ty
)
720 _
=> bug
!("open drop from non-ADT `{:?}`", ty
)
724 /// Return a basic block that drop an lvalue using the context
725 /// and path in `c`. If `update_drop_flag` is true, also
729 /// if(update_drop_flag) FLAG(c.path) = false
731 fn complete_drop
<'a
>(
733 c
: &DropCtxt
<'a
, 'tcx
>,
734 update_drop_flag
: bool
)
737 debug
!("complete_drop({:?},{:?})", c
, update_drop_flag
);
739 let drop_block
= self.drop_block(c
);
740 if update_drop_flag
{
742 Location { block: drop_block, statement_index: 0 }
,
744 DropFlagState
::Absent
748 self.drop_flag_test_block(c
, drop_block
)
751 /// Create a simple conditional drop.
754 /// FLAGS(c.lv) = false
756 fn conditional_drop
<'a
>(&mut self, c
: &DropCtxt
<'a
, 'tcx
>)
759 debug
!("conditional_drop({:?})", c
);
760 let drop_bb
= self.drop_block(c
);
761 self.drop_flags_for_drop(c
, drop_bb
);
763 self.drop_flag_test_block(c
, drop_bb
)
766 fn new_block
<'a
>(&mut self,
767 c
: &DropCtxt
<'a
, 'tcx
>,
769 k
: TerminatorKind
<'tcx
>)
772 self.patch
.new_block(BasicBlockData
{
774 terminator
: Some(Terminator
{
775 source_info
: c
.source_info
, kind
: k
777 is_cleanup
: is_cleanup
781 fn elaborated_drop_block
<'a
>(&mut self, c
: &DropCtxt
<'a
, 'tcx
>) -> BasicBlock
{
782 debug
!("elaborated_drop_block({:?})", c
);
783 let blk
= self.drop_block(c
);
784 self.elaborate_drop(c
, blk
);
788 fn drop_flag_test_block
<'a
>(&mut self,
789 c
: &DropCtxt
<'a
, 'tcx
>,
792 self.drop_flag_test_block_with_succ(c
, c
.is_cleanup
, on_set
, c
.succ
)
795 fn drop_flag_test_block_with_succ
<'a
>(&mut self,
796 c
: &DropCtxt
<'a
, 'tcx
>,
799 on_unset
: BasicBlock
)
802 let (maybe_live
, maybe_dead
) = c
.init_data
.state(c
.path
);
803 debug
!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
804 c
, is_cleanup
, on_set
, (maybe_live
, maybe_dead
));
806 match (maybe_live
, maybe_dead
) {
807 (false, _
) => on_unset
,
808 (true, false) => on_set
,
810 let flag
= self.drop_flag(c
.path
).unwrap();
811 self.new_block(c
, is_cleanup
, TerminatorKind
::If
{
812 cond
: Operand
::Consume(flag
),
813 targets
: (on_set
, on_unset
)
819 fn drop_block
<'a
>(&mut self, c
: &DropCtxt
<'a
, 'tcx
>) -> BasicBlock
{
820 self.new_block(c
, c
.is_cleanup
, TerminatorKind
::Drop
{
821 location
: c
.lvalue
.clone(),
827 fn box_free_block
<'a
>(
829 c
: &DropCtxt
<'a
, 'tcx
>,
834 let block
= self.unelaborated_free_block(c
, ty
, target
, is_cleanup
);
835 self.drop_flag_test_block_with_succ(c
, is_cleanup
, block
, target
)
838 fn unelaborated_free_block
<'a
>(
840 c
: &DropCtxt
<'a
, 'tcx
>,
845 let mut statements
= vec
![];
846 if let Some(&flag
) = self.drop_flags
.get(&c
.path
) {
847 statements
.push(Statement
{
848 source_info
: c
.source_info
,
849 kind
: StatementKind
::Assign(
851 self.constant_bool(c
.source_info
.span
, false)
857 let unit_temp
= Lvalue
::Temp(self.patch
.new_temp(tcx
.mk_nil()));
858 let free_func
= tcx
.lang_items
.require(lang_items
::BoxFreeFnLangItem
)
859 .unwrap_or_else(|e
| tcx
.sess
.fatal(&e
));
860 let substs
= Substs
::new(tcx
, iter
::once(Kind
::from(ty
)));
861 let fty
= tcx
.lookup_item_type(free_func
).ty
.subst(tcx
, substs
);
863 self.patch
.new_block(BasicBlockData
{
864 statements
: statements
,
865 terminator
: Some(Terminator
{
866 source_info
: c
.source_info
, kind
: TerminatorKind
::Call
{
867 func
: Operand
::Constant(Constant
{
868 span
: c
.source_info
.span
,
870 literal
: Literal
::Item
{
875 args
: vec
![Operand
::Consume(c
.lvalue
.clone())],
876 destination
: Some((unit_temp
, target
)),
880 is_cleanup
: is_cleanup
884 fn must_complete_drop
<'a
>(&self, c
: &DropCtxt
<'a
, 'tcx
>) -> bool
{
885 // if we have a destuctor, we must *not* split the drop.
887 // dataflow can create unneeded children in some cases
888 // - be sure to ignore them.
890 let ty
= c
.lvalue
.ty(self.mir
, self.tcx
).to_ty(self.tcx
);
893 ty
::TyAdt(def
, _
) => {
895 self.tcx
.sess
.span_warn(
897 &format
!("dataflow bug??? moving out of type with dtor {:?}",
908 fn constant_bool(&self, span
: Span
, val
: bool
) -> Rvalue
<'tcx
> {
909 Rvalue
::Use(Operand
::Constant(Constant
{
911 ty
: self.tcx
.types
.bool
,
912 literal
: Literal
::Value { value: ConstVal::Bool(val) }
916 fn set_drop_flag(&mut self, loc
: Location
, path
: MovePathIndex
, val
: DropFlagState
) {
917 if let Some(&flag
) = self.drop_flags
.get(&path
) {
918 let span
= self.patch
.source_info_for_location(self.mir
, loc
).span
;
919 let val
= self.constant_bool(span
, val
.value());
920 self.patch
.add_assign(loc
, Lvalue
::Temp(flag
), val
);
924 fn drop_flags_on_init(&mut self) {
925 let loc
= Location { block: START_BLOCK, statement_index: 0 }
;
926 let span
= self.patch
.source_info_for_location(self.mir
, loc
).span
;
927 let false_
= self.constant_bool(span
, false);
928 for flag
in self.drop_flags
.values() {
929 self.patch
.add_assign(loc
, Lvalue
::Temp(*flag
), false_
.clone());
933 fn drop_flags_for_fn_rets(&mut self) {
934 for (bb
, data
) in self.mir
.basic_blocks().iter_enumerated() {
935 if let TerminatorKind
::Call
{
936 destination
: Some((ref lv
, tgt
)), cleanup
: Some(_
), ..
937 } = data
.terminator().kind
{
938 assert
!(!self.patch
.is_patched(bb
));
940 let loc
= Location { block: tgt, statement_index: 0 }
;
941 let path
= self.move_data().rev_lookup
.find(lv
);
942 on_lookup_result_bits(
943 self.tcx
, self.mir
, self.move_data(), path
,
944 |child
| self.set_drop_flag(loc
, child
, DropFlagState
::Present
)
950 fn drop_flags_for_args(&mut self) {
951 let loc
= Location { block: START_BLOCK, statement_index: 0 }
;
952 super::drop_flag_effects_for_function_entry(
953 self.tcx
, self.mir
, self.env
, |path
, ds
| {
954 self.set_drop_flag(loc
, path
, ds
);
959 fn drop_flags_for_locs(&mut self) {
960 // We intentionally iterate only over the *old* basic blocks.
962 // Basic blocks created by drop elaboration update their
963 // drop flags by themselves, to avoid the drop flags being
964 // clobbered before they are read.
966 for (bb
, data
) in self.mir
.basic_blocks().iter_enumerated() {
967 debug
!("drop_flags_for_locs({:?})", data
);
968 for i
in 0..(data
.statements
.len()+1) {
969 debug
!("drop_flag_for_locs: stmt {}", i
);
970 let mut allow_initializations
= true;
971 if i
== data
.statements
.len() {
972 match data
.terminator().kind
{
973 TerminatorKind
::Drop { .. }
=> {
974 // drop elaboration should handle that by itself
977 TerminatorKind
::DropAndReplace { .. }
=> {
978 // this contains the move of the source and
979 // the initialization of the destination. We
980 // only want the former - the latter is handled
981 // by the elaboration code and must be done
982 // *after* the destination is dropped.
983 assert
!(self.patch
.is_patched(bb
));
984 allow_initializations
= false;
987 assert
!(!self.patch
.is_patched(bb
));
991 let loc
= Location { block: bb, statement_index: i }
;
992 super::drop_flag_effects_for_location(
993 self.tcx
, self.mir
, self.env
, loc
, |path
, ds
| {
994 if ds
== DropFlagState
::Absent
|| allow_initializations
{
995 self.set_drop_flag(loc
, path
, ds
)
1001 // There may be a critical edge after this call,
1002 // so mark the return as initialized *before* the
1004 if let TerminatorKind
::Call
{
1005 destination
: Some((ref lv
, _
)), cleanup
: None
, ..
1006 } = data
.terminator().kind
{
1007 assert
!(!self.patch
.is_patched(bb
));
1009 let loc
= Location { block: bb, statement_index: data.statements.len() }
;
1010 let path
= self.move_data().rev_lookup
.find(lv
);
1011 on_lookup_result_bits(
1012 self.tcx
, self.mir
, self.move_data(), path
,
1013 |child
| self.set_drop_flag(loc
, child
, DropFlagState
::Present
)
1019 fn drop_flags_for_drop
<'a
>(&mut self,
1020 c
: &DropCtxt
<'a
, 'tcx
>,
1023 let loc
= self.patch
.terminator_loc(self.mir
, bb
);
1024 on_all_children_bits(
1025 self.tcx
, self.mir
, self.move_data(), c
.path
,
1026 |child
| self.set_drop_flag(loc
, child
, DropFlagState
::Absent
)