1 // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
14 use rustc
::middle
::lang_items
;
15 use rustc
::traits
::Reveal
;
16 use rustc
::ty
::{self, Ty, TyCtxt}
;
17 use rustc
::ty
::subst
::Substs
;
18 use rustc
::ty
::util
::IntTypeExt
;
19 use rustc_data_structures
::indexed_vec
::Idx
;
20 use util
::patch
::MirPatch
;
/// The state of a value's drop flag: whether the value still needs
/// to be dropped when control flow reaches a drop terminator.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum DropFlagState {
    Present, // i.e. initialized
    Absent, // i.e. deinitialized or "moved"
}

impl DropFlagState {
    /// Returns the boolean value stored in the drop flag for this state
    /// (`true` while the value is initialized and must be dropped).
    pub fn value(self) -> bool {
        match self {
            DropFlagState::Absent => false,
            DropFlagState::Present => true,
        }
    }
}
/// How much of a value's drop-flag tree a flag-clearing operation
/// should affect.
#[derive(Debug)]
pub enum DropFlagMode {
    /// Clear only the flag of the value itself, not of its subfields.
    Shallow,
    /// Clear the flags of the value and all of its subfields.
    Deep
}
53 #[derive(Copy, Clone, Debug)]
60 fn is_cleanup(self) -> bool
{
62 Unwind
::To(..) => false,
63 Unwind
::InCleanup
=> true
67 fn into_option(self) -> Option
<BasicBlock
> {
69 Unwind
::To(bb
) => Some(bb
),
70 Unwind
::InCleanup
=> None
,
74 fn map
<F
>(self, f
: F
) -> Self where F
: FnOnce(BasicBlock
) -> BasicBlock
{
76 Unwind
::To(bb
) => Unwind
::To(f(bb
)),
77 Unwind
::InCleanup
=> Unwind
::InCleanup
82 pub trait DropElaborator
<'a
, 'tcx
: 'a
> : fmt
::Debug
{
83 type Path
: Copy
+ fmt
::Debug
;
85 fn patch(&mut self) -> &mut MirPatch
<'tcx
>;
86 fn mir(&self) -> &'a Mir
<'tcx
>;
87 fn tcx(&self) -> TyCtxt
<'a
, 'tcx
, 'tcx
>;
88 fn param_env(&self) -> ty
::ParamEnv
<'tcx
>;
90 fn drop_style(&self, path
: Self::Path
, mode
: DropFlagMode
) -> DropStyle
;
91 fn get_drop_flag(&mut self, path
: Self::Path
) -> Option
<Operand
<'tcx
>>;
92 fn clear_drop_flag(&mut self, location
: Location
, path
: Self::Path
, mode
: DropFlagMode
);
95 fn field_subpath(&self, path
: Self::Path
, field
: Field
) -> Option
<Self::Path
>;
96 fn deref_subpath(&self, path
: Self::Path
) -> Option
<Self::Path
>;
97 fn downcast_subpath(&self, path
: Self::Path
, variant
: usize) -> Option
<Self::Path
>;
98 fn array_subpath(&self, path
: Self::Path
, index
: u32, size
: u32) -> Option
<Self::Path
>;
102 struct DropCtxt
<'l
, 'b
: 'l
, 'tcx
: 'b
, D
>
103 where D
: DropElaborator
<'b
, 'tcx
> + 'l
105 elaborator
: &'l
mut D
,
107 source_info
: SourceInfo
,
109 place
: &'l Place
<'tcx
>,
115 pub fn elaborate_drop
<'b
, 'tcx
, D
>(
117 source_info
: SourceInfo
,
123 where D
: DropElaborator
<'b
, 'tcx
>
126 elaborator
, source_info
, place
, path
, succ
, unwind
130 impl<'l
, 'b
, 'tcx
, D
> DropCtxt
<'l
, 'b
, 'tcx
, D
>
131 where D
: DropElaborator
<'b
, 'tcx
>
133 fn place_ty(&self, place
: &Place
<'tcx
>) -> Ty
<'tcx
> {
134 place
.ty(self.elaborator
.mir(), self.tcx()).to_ty(self.tcx())
137 fn tcx(&self) -> TyCtxt
<'b
, 'tcx
, 'tcx
> {
138 self.elaborator
.tcx()
141 /// This elaborates a single drop instruction, located at `bb`, and
144 /// The elaborated drop checks the drop flags to only drop what
147 /// In addition, the relevant drop flags also need to be cleared
148 /// to avoid double-drops. However, in the middle of a complex
149 /// drop, one must avoid clearing some of the flags before they
150 /// are read, as that would cause a memory leak.
152 /// In particular, when dropping an ADT, multiple fields may be
153 /// joined together under the `rest` subpath. They are all controlled
154 /// by the primary drop flag, but only the last rest-field dropped
155 /// should clear it (and it must also not clear anything else).
157 /// FIXME: I think we should just control the flags externally
158 /// and then we do not need this machinery.
159 pub fn elaborate_drop
<'a
>(&mut self, bb
: BasicBlock
) {
160 debug
!("elaborate_drop({:?})", self);
161 let style
= self.elaborator
.drop_style(self.path
, DropFlagMode
::Deep
);
162 debug
!("elaborate_drop({:?}): live - {:?}", self, style
);
165 self.elaborator
.patch().patch_terminator(bb
, TerminatorKind
::Goto
{
169 DropStyle
::Static
=> {
170 let loc
= self.terminator_loc(bb
);
171 self.elaborator
.clear_drop_flag(loc
, self.path
, DropFlagMode
::Deep
);
172 self.elaborator
.patch().patch_terminator(bb
, TerminatorKind
::Drop
{
173 location
: self.place
.clone(),
175 unwind
: self.unwind
.into_option(),
178 DropStyle
::Conditional
=> {
179 let unwind
= self.unwind
; // FIXME(#43234)
180 let succ
= self.succ
;
181 let drop_bb
= self.complete_drop(Some(DropFlagMode
::Deep
), succ
, unwind
);
182 self.elaborator
.patch().patch_terminator(bb
, TerminatorKind
::Goto
{
187 let drop_bb
= self.open_drop();
188 self.elaborator
.patch().patch_terminator(bb
, TerminatorKind
::Goto
{
195 /// Return the place and move path for each field of `variant`,
196 /// (the move path is `None` if the field is a rest field).
197 fn move_paths_for_fields(&self,
198 base_place
: &Place
<'tcx
>,
199 variant_path
: D
::Path
,
200 variant
: &'tcx ty
::VariantDef
,
201 substs
: &'tcx Substs
<'tcx
>)
202 -> Vec
<(Place
<'tcx
>, Option
<D
::Path
>)>
204 variant
.fields
.iter().enumerate().map(|(i
, f
)| {
205 let field
= Field
::new(i
);
206 let subpath
= self.elaborator
.field_subpath(variant_path
, field
);
208 assert_eq
!(self.elaborator
.param_env().reveal
, Reveal
::All
);
209 let field_ty
= self.tcx().normalize_erasing_regions(
210 self.elaborator
.param_env(),
211 f
.ty(self.tcx(), substs
),
213 (base_place
.clone().field(field
, field_ty
), subpath
)
217 fn drop_subpath(&mut self,
219 path
: Option
<D
::Path
>,
224 if let Some(path
) = path
{
225 debug
!("drop_subpath: for std field {:?}", place
);
228 elaborator
: self.elaborator
,
229 source_info
: self.source_info
,
230 path
, place
, succ
, unwind
,
231 }.elaborated_drop_block()
233 debug
!("drop_subpath: for rest field {:?}", place
);
236 elaborator
: self.elaborator
,
237 source_info
: self.source_info
,
239 // Using `self.path` here to condition the drop on
240 // our own drop flag.
242 }.complete_drop(None
, succ
, unwind
)
246 /// Create one-half of the drop ladder for a list of fields, and return
247 /// the list of steps in it in reverse order, with the first step
248 /// dropping 0 fields and so on.
250 /// `unwind_ladder` is such a list of steps in reverse order,
251 /// which is called if the matching step of the drop glue panics.
252 fn drop_halfladder(&mut self,
253 unwind_ladder
: &[Unwind
],
254 mut succ
: BasicBlock
,
255 fields
: &[(Place
<'tcx
>, Option
<D
::Path
>)])
258 Some(succ
).into_iter().chain(
259 fields
.iter().rev().zip(unwind_ladder
)
260 .map(|(&(ref place
, path
), &unwind_succ
)| {
261 succ
= self.drop_subpath(place
, path
, succ
, unwind_succ
);
267 fn drop_ladder_bottom(&mut self) -> (BasicBlock
, Unwind
) {
268 // Clear the "master" drop flag at the end. This is needed
269 // because the "master" drop protects the ADT's discriminant,
270 // which is invalidated after the ADT is dropped.
271 let (succ
, unwind
) = (self.succ
, self.unwind
); // FIXME(#43234)
273 self.drop_flag_reset_block(DropFlagMode
::Shallow
, succ
, unwind
),
274 unwind
.map(|unwind
| {
275 self.drop_flag_reset_block(DropFlagMode
::Shallow
, unwind
, Unwind
::InCleanup
)
280 /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
282 /// For example, with 3 fields, the drop ladder is
285 /// ELAB(drop location.0 [target=.d1, unwind=.c1])
287 /// ELAB(drop location.1 [target=.d2, unwind=.c2])
289 /// ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
291 /// ELAB(drop location.1 [target=.c2])
293 /// ELAB(drop location.2 [target=`self.unwind`])
295 /// NOTE: this does not clear the master drop flag, so you need
296 /// to point succ/unwind on a `drop_ladder_bottom`.
297 fn drop_ladder
<'a
>(&mut self,
298 fields
: Vec
<(Place
<'tcx
>, Option
<D
::Path
>)>,
301 -> (BasicBlock
, Unwind
)
303 debug
!("drop_ladder({:?}, {:?})", self, fields
);
305 let mut fields
= fields
;
306 fields
.retain(|&(ref place
, _
)| {
307 self.place_ty(place
).needs_drop(self.tcx(), self.elaborator
.param_env())
310 debug
!("drop_ladder - fields needing drop: {:?}", fields
);
312 let unwind_ladder
= vec
![Unwind
::InCleanup
; fields
.len() + 1];
313 let unwind_ladder
: Vec
<_
> = if let Unwind
::To(target
) = unwind
{
314 let halfladder
= self.drop_halfladder(&unwind_ladder
, target
, &fields
);
315 halfladder
.into_iter().map(Unwind
::To
).collect()
321 self.drop_halfladder(&unwind_ladder
, succ
, &fields
);
323 (*normal_ladder
.last().unwrap(), *unwind_ladder
.last().unwrap())
326 fn open_drop_for_tuple
<'a
>(&mut self, tys
: &[Ty
<'tcx
>])
329 debug
!("open_drop_for_tuple({:?}, {:?})", self, tys
);
331 let fields
= tys
.iter().enumerate().map(|(i
, &ty
)| {
332 (self.place
.clone().field(Field
::new(i
), ty
),
333 self.elaborator
.field_subpath(self.path
, Field
::new(i
)))
336 let (succ
, unwind
) = self.drop_ladder_bottom();
337 self.drop_ladder(fields
, succ
, unwind
).0
340 fn open_drop_for_box
<'a
>(&mut self, adt
: &'tcx ty
::AdtDef
, substs
: &'tcx Substs
<'tcx
>)
343 debug
!("open_drop_for_box({:?}, {:?}, {:?})", self, adt
, substs
);
345 let interior
= self.place
.clone().deref();
346 let interior_path
= self.elaborator
.deref_subpath(self.path
);
348 let succ
= self.succ
; // FIXME(#43234)
349 let unwind
= self.unwind
;
350 let succ
= self.box_free_block(adt
, substs
, succ
, unwind
);
351 let unwind_succ
= self.unwind
.map(|unwind
| {
352 self.box_free_block(adt
, substs
, unwind
, Unwind
::InCleanup
)
355 self.drop_subpath(&interior
, interior_path
, succ
, unwind_succ
)
358 fn open_drop_for_adt
<'a
>(&mut self, adt
: &'tcx ty
::AdtDef
, substs
: &'tcx Substs
<'tcx
>)
360 debug
!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt
, substs
);
361 if adt
.variants
.len() == 0 {
362 return self.elaborator
.patch().new_block(BasicBlockData
{
364 terminator
: Some(Terminator
{
365 source_info
: self.source_info
,
366 kind
: TerminatorKind
::Unreachable
368 is_cleanup
: self.unwind
.is_cleanup()
372 let contents_drop
= if adt
.is_union() {
373 (self.succ
, self.unwind
)
375 self.open_drop_for_adt_contents(adt
, substs
)
378 if adt
.has_dtor(self.tcx()) {
379 self.destructor_call_block(contents_drop
)
385 fn open_drop_for_adt_contents(&mut self, adt
: &'tcx ty
::AdtDef
,
386 substs
: &'tcx Substs
<'tcx
>)
387 -> (BasicBlock
, Unwind
) {
388 let (succ
, unwind
) = self.drop_ladder_bottom();
390 let fields
= self.move_paths_for_fields(
396 self.drop_ladder(fields
, succ
, unwind
)
398 self.open_drop_for_multivariant(adt
, substs
, succ
, unwind
)
402 fn open_drop_for_multivariant(&mut self, adt
: &'tcx ty
::AdtDef
,
403 substs
: &'tcx Substs
<'tcx
>,
406 -> (BasicBlock
, Unwind
) {
407 let mut values
= Vec
::with_capacity(adt
.variants
.len());
408 let mut normal_blocks
= Vec
::with_capacity(adt
.variants
.len());
409 let mut unwind_blocks
= if unwind
.is_cleanup() {
412 Some(Vec
::with_capacity(adt
.variants
.len()))
415 let mut have_otherwise
= false;
417 for (variant_index
, discr
) in adt
.discriminants(self.tcx()).enumerate() {
418 let subpath
= self.elaborator
.downcast_subpath(
419 self.path
, variant_index
);
420 if let Some(variant_path
) = subpath
{
421 let base_place
= self.place
.clone().elem(
422 ProjectionElem
::Downcast(adt
, variant_index
)
424 let fields
= self.move_paths_for_fields(
427 &adt
.variants
[variant_index
],
429 values
.push(discr
.val
);
430 if let Unwind
::To(unwind
) = unwind
{
431 // We can't use the half-ladder from the original
432 // drop ladder, because this breaks the
433 // "funclet can't have 2 successor funclets"
434 // requirement from MSVC:
436 // switch unwind-switch
438 // v1.0 v2.0 v2.0-unwind v1.0-unwind
440 // v1.1-unwind v2.1-unwind |
442 // \-------------------------------/
444 // Create a duplicate half-ladder to avoid that. We
445 // could technically only do this on MSVC, but I
446 // I want to minimize the divergence between MSVC
449 let unwind_blocks
= unwind_blocks
.as_mut().unwrap();
450 let unwind_ladder
= vec
![Unwind
::InCleanup
; fields
.len() + 1];
452 self.drop_halfladder(&unwind_ladder
, unwind
, &fields
);
453 unwind_blocks
.push(halfladder
.last().cloned().unwrap());
455 let (normal
, _
) = self.drop_ladder(fields
, succ
, unwind
);
456 normal_blocks
.push(normal
);
458 have_otherwise
= true;
463 normal_blocks
.push(self.drop_block(succ
, unwind
));
464 if let Unwind
::To(unwind
) = unwind
{
465 unwind_blocks
.as_mut().unwrap().push(
466 self.drop_block(unwind
, Unwind
::InCleanup
)
473 (self.adt_switch_block(adt
, normal_blocks
, &values
, succ
, unwind
),
474 unwind
.map(|unwind
| {
475 self.adt_switch_block(
476 adt
, unwind_blocks
.unwrap(), &values
, unwind
, Unwind
::InCleanup
481 fn adt_switch_block(&mut self,
482 adt
: &'tcx ty
::AdtDef
,
483 blocks
: Vec
<BasicBlock
>,
488 // If there are multiple variants, then if something
489 // is present within the enum the discriminant, tracked
490 // by the rest path, must be initialized.
492 // Additionally, we do not want to switch on the
493 // discriminant after it is free-ed, because that
494 // way lies only trouble.
495 let discr_ty
= adt
.repr
.discr_type().to_ty(self.tcx());
496 let discr
= Place
::Local(self.new_temp(discr_ty
));
497 let discr_rv
= Rvalue
::Discriminant(self.place
.clone());
498 let switch_block
= BasicBlockData
{
499 statements
: vec
![self.assign(&discr
, discr_rv
)],
500 terminator
: Some(Terminator
{
501 source_info
: self.source_info
,
502 kind
: TerminatorKind
::SwitchInt
{
503 discr
: Operand
::Move(discr
),
505 values
: From
::from(values
.to_owned()),
509 is_cleanup
: unwind
.is_cleanup(),
511 let switch_block
= self.elaborator
.patch().new_block(switch_block
);
512 self.drop_flag_test_block(switch_block
, succ
, unwind
)
515 fn destructor_call_block
<'a
>(&mut self, (succ
, unwind
): (BasicBlock
, Unwind
))
518 debug
!("destructor_call_block({:?}, {:?})", self, succ
);
519 let tcx
= self.tcx();
520 let drop_trait
= tcx
.lang_items().drop_trait().unwrap();
521 let drop_fn
= tcx
.associated_items(drop_trait
).next().unwrap();
522 let ty
= self.place_ty(self.place
);
523 let substs
= tcx
.mk_substs_trait(ty
, &[]);
525 let ref_ty
= tcx
.mk_ref(tcx
.types
.re_erased
, ty
::TypeAndMut
{
527 mutbl
: hir
::Mutability
::MutMutable
529 let ref_place
= self.new_temp(ref_ty
);
530 let unit_temp
= Place
::Local(self.new_temp(tcx
.mk_nil()));
532 let result
= BasicBlockData
{
533 statements
: vec
![self.assign(
534 &Place
::Local(ref_place
),
535 Rvalue
::Ref(tcx
.types
.re_erased
,
536 BorrowKind
::Mut { allow_two_phase_borrow: false }
,
539 terminator
: Some(Terminator
{
540 kind
: TerminatorKind
::Call
{
541 func
: Operand
::function_handle(tcx
, drop_fn
.def_id
, substs
,
542 self.source_info
.span
),
543 args
: vec
![Operand
::Move(Place
::Local(ref_place
))],
544 destination
: Some((unit_temp
, succ
)),
545 cleanup
: unwind
.into_option(),
547 source_info
: self.source_info
549 is_cleanup
: unwind
.is_cleanup(),
551 self.elaborator
.patch().new_block(result
)
554 /// create a loop that drops an array:
559 /// can_go = cur == length_or_end
560 /// if can_go then succ else drop-block
564 /// cur = cur.offset(1)
566 /// ptr = &mut P[cur]
570 fn drop_loop(&mut self,
573 length_or_end
: &Place
<'tcx
>,
579 let copy
= |place
: &Place
<'tcx
>| Operand
::Copy(place
.clone());
580 let move_
= |place
: &Place
<'tcx
>| Operand
::Move(place
.clone());
581 let tcx
= self.tcx();
583 let ref_ty
= tcx
.mk_ref(tcx
.types
.re_erased
, ty
::TypeAndMut
{
585 mutbl
: hir
::Mutability
::MutMutable
587 let ptr
= &Place
::Local(self.new_temp(ref_ty
));
588 let can_go
= &Place
::Local(self.new_temp(tcx
.types
.bool
));
590 let one
= self.constant_usize(1);
591 let (ptr_next
, cur_next
) = if ptr_based
{
592 (Rvalue
::Use(copy(&Place
::Local(cur
))),
593 Rvalue
::BinaryOp(BinOp
::Offset
, copy(&Place
::Local(cur
)), one
))
597 BorrowKind
::Mut { allow_two_phase_borrow: false }
,
598 self.place
.clone().index(cur
)),
599 Rvalue
::BinaryOp(BinOp
::Add
, copy(&Place
::Local(cur
)), one
))
602 let drop_block
= BasicBlockData
{
604 self.assign(ptr
, ptr_next
),
605 self.assign(&Place
::Local(cur
), cur_next
)
607 is_cleanup
: unwind
.is_cleanup(),
608 terminator
: Some(Terminator
{
609 source_info
: self.source_info
,
610 // this gets overwritten by drop elaboration.
611 kind
: TerminatorKind
::Unreachable
,
614 let drop_block
= self.elaborator
.patch().new_block(drop_block
);
616 let loop_block
= BasicBlockData
{
618 self.assign(can_go
, Rvalue
::BinaryOp(BinOp
::Eq
,
619 copy(&Place
::Local(cur
)),
620 copy(length_or_end
)))
622 is_cleanup
: unwind
.is_cleanup(),
623 terminator
: Some(Terminator
{
624 source_info
: self.source_info
,
625 kind
: TerminatorKind
::if_(tcx
, move_(can_go
), succ
, drop_block
)
628 let loop_block
= self.elaborator
.patch().new_block(loop_block
);
630 self.elaborator
.patch().patch_terminator(drop_block
, TerminatorKind
::Drop
{
631 location
: ptr
.clone().deref(),
633 unwind
: unwind
.into_option()
639 fn open_drop_for_array(&mut self, ety
: Ty
<'tcx
>, opt_size
: Option
<u64>) -> BasicBlock
{
640 debug
!("open_drop_for_array({:?}, {:?})", ety
, opt_size
);
642 // if size_of::<ety>() == 0 {
648 if let Some(size
) = opt_size
{
649 assert
!(size
<= (u32::MAX
as u64),
650 "move out check doesn't implemented for array bigger then u32");
651 let size
= size
as u32;
652 let fields
: Vec
<(Place
<'tcx
>, Option
<D
::Path
>)> = (0..size
).map(|i
| {
653 (self.place
.clone().elem(ProjectionElem
::ConstantIndex
{
658 self.elaborator
.array_subpath(self.path
, i
, size
))
661 if fields
.iter().any(|(_
,path
)| path
.is_some()) {
662 let (succ
, unwind
) = self.drop_ladder_bottom();
663 return self.drop_ladder(fields
, succ
, unwind
).0
667 let move_
= |place
: &Place
<'tcx
>| Operand
::Move(place
.clone());
668 let tcx
= self.tcx();
669 let size
= &Place
::Local(self.new_temp(tcx
.types
.usize));
670 let size_is_zero
= &Place
::Local(self.new_temp(tcx
.types
.bool
));
671 let base_block
= BasicBlockData
{
673 self.assign(size
, Rvalue
::NullaryOp(NullOp
::SizeOf
, ety
)),
674 self.assign(size_is_zero
, Rvalue
::BinaryOp(BinOp
::Eq
,
676 self.constant_usize(0)))
678 is_cleanup
: self.unwind
.is_cleanup(),
679 terminator
: Some(Terminator
{
680 source_info
: self.source_info
,
681 kind
: TerminatorKind
::if_(
684 self.drop_loop_pair(ety
, false),
685 self.drop_loop_pair(ety
, true)
689 self.elaborator
.patch().new_block(base_block
)
692 // create a pair of drop-loops of `place`, which drops its contents
693 // even in the case of 1 panic. If `ptr_based`, create a pointer loop,
694 // otherwise create an index loop.
695 fn drop_loop_pair(&mut self, ety
: Ty
<'tcx
>, ptr_based
: bool
) -> BasicBlock
{
696 debug
!("drop_loop_pair({:?}, {:?})", ety
, ptr_based
);
697 let tcx
= self.tcx();
698 let iter_ty
= if ptr_based
{
704 let cur
= self.new_temp(iter_ty
);
705 let length
= Place
::Local(self.new_temp(tcx
.types
.usize));
706 let length_or_end
= if ptr_based
{
707 Place
::Local(self.new_temp(iter_ty
))
712 let unwind
= self.unwind
.map(|unwind
| {
713 self.drop_loop(unwind
,
721 let succ
= self.succ
; // FIXME(#43234)
722 let loop_block
= self.drop_loop(
730 let cur
= Place
::Local(cur
);
731 let zero
= self.constant_usize(0);
732 let mut drop_block_stmts
= vec
![];
733 drop_block_stmts
.push(self.assign(&length
, Rvalue
::Len(self.place
.clone())));
735 let tmp_ty
= tcx
.mk_mut_ptr(self.place_ty(self.place
));
736 let tmp
= Place
::Local(self.new_temp(tmp_ty
));
738 // cur = tmp as *mut T;
739 // end = Offset(cur, len);
740 drop_block_stmts
.push(self.assign(&tmp
, Rvalue
::Ref(
742 BorrowKind
::Mut { allow_two_phase_borrow: false }
,
745 drop_block_stmts
.push(self.assign(&cur
, Rvalue
::Cast(
746 CastKind
::Misc
, Operand
::Move(tmp
.clone()), iter_ty
748 drop_block_stmts
.push(self.assign(&length_or_end
,
749 Rvalue
::BinaryOp(BinOp
::Offset
,
750 Operand
::Copy(cur
.clone()), Operand
::Move(length
.clone())
753 // index = 0 (length already pushed)
754 drop_block_stmts
.push(self.assign(&cur
, Rvalue
::Use(zero
)));
756 let drop_block
= self.elaborator
.patch().new_block(BasicBlockData
{
757 statements
: drop_block_stmts
,
758 is_cleanup
: unwind
.is_cleanup(),
759 terminator
: Some(Terminator
{
760 source_info
: self.source_info
,
761 kind
: TerminatorKind
::Goto { target: loop_block }
765 // FIXME(#34708): handle partially-dropped array/slice elements.
766 let reset_block
= self.drop_flag_reset_block(DropFlagMode
::Deep
, drop_block
, unwind
);
767 self.drop_flag_test_block(reset_block
, succ
, unwind
)
770 /// The slow-path - create an "open", elaborated drop for a type
771 /// which is moved-out-of only partially, and patch `bb` to a jump
772 /// to it. This must not be called on ADTs with a destructor,
773 /// as these can't be moved-out-of, except for `Box<T>`, which is
776 /// This creates a "drop ladder" that drops the needed fields of the
777 /// ADT, both in the success case or if one of the destructors fail.
778 fn open_drop
<'a
>(&mut self) -> BasicBlock
{
779 let ty
= self.place_ty(self.place
);
781 ty
::TyClosure(def_id
, substs
) => {
782 let tys
: Vec
<_
> = substs
.upvar_tys(def_id
, self.tcx()).collect();
783 self.open_drop_for_tuple(&tys
)
785 // Note that `elaborate_drops` only drops the upvars of a generator,
786 // and this is ok because `open_drop` here can only be reached
787 // within that own generator's resume function.
788 // This should only happen for the self argument on the resume function.
789 // It effetively only contains upvars until the generator transformation runs.
790 // See librustc_mir/transform/generator.rs for more details.
791 ty
::TyGenerator(def_id
, substs
, _
) => {
792 let tys
: Vec
<_
> = substs
.upvar_tys(def_id
, self.tcx()).collect();
793 self.open_drop_for_tuple(&tys
)
795 ty
::TyTuple(tys
) => {
796 self.open_drop_for_tuple(tys
)
798 ty
::TyAdt(def
, substs
) => {
800 self.open_drop_for_box(def
, substs
)
802 self.open_drop_for_adt(def
, substs
)
805 ty
::TyDynamic(..) => {
806 let unwind
= self.unwind
; // FIXME(#43234)
807 let succ
= self.succ
;
808 self.complete_drop(Some(DropFlagMode
::Deep
), succ
, unwind
)
810 ty
::TyArray(ety
, size
) => {
811 let size
= size
.assert_usize(self.tcx());
812 self.open_drop_for_array(ety
, size
)
814 ty
::TySlice(ety
) => self.open_drop_for_array(ety
, None
),
816 _
=> bug
!("open drop from non-ADT `{:?}`", ty
)
820 /// Return a basic block that drop a place using the context
821 /// and path in `c`. If `mode` is something, also clear `c`
824 /// if FLAG(self.path)
825 /// if let Some(mode) = mode: FLAG(self.path)[mode] = false
827 fn complete_drop
<'a
>(&mut self,
828 drop_mode
: Option
<DropFlagMode
>,
830 unwind
: Unwind
) -> BasicBlock
832 debug
!("complete_drop({:?},{:?})", self, drop_mode
);
834 let drop_block
= self.drop_block(succ
, unwind
);
835 let drop_block
= if let Some(mode
) = drop_mode
{
836 self.drop_flag_reset_block(mode
, drop_block
, unwind
)
841 self.drop_flag_test_block(drop_block
, succ
, unwind
)
844 fn drop_flag_reset_block(&mut self,
847 unwind
: Unwind
) -> BasicBlock
849 debug
!("drop_flag_reset_block({:?},{:?})", self, mode
);
851 let block
= self.new_block(unwind
, TerminatorKind
::Goto { target: succ }
);
852 let block_start
= Location { block: block, statement_index: 0 }
;
853 self.elaborator
.clear_drop_flag(block_start
, self.path
, mode
);
857 fn elaborated_drop_block
<'a
>(&mut self) -> BasicBlock
{
858 debug
!("elaborated_drop_block({:?})", self);
859 let unwind
= self.unwind
; // FIXME(#43234)
860 let succ
= self.succ
;
861 let blk
= self.drop_block(succ
, unwind
);
862 self.elaborate_drop(blk
);
866 fn box_free_block
<'a
>(
868 adt
: &'tcx ty
::AdtDef
,
869 substs
: &'tcx Substs
<'tcx
>,
873 let block
= self.unelaborated_free_block(adt
, substs
, target
, unwind
);
874 self.drop_flag_test_block(block
, target
, unwind
)
877 fn unelaborated_free_block
<'a
>(
879 adt
: &'tcx ty
::AdtDef
,
880 substs
: &'tcx Substs
<'tcx
>,
884 let tcx
= self.tcx();
885 let unit_temp
= Place
::Local(self.new_temp(tcx
.mk_nil()));
886 let free_func
= tcx
.require_lang_item(lang_items
::BoxFreeFnLangItem
);
887 let args
= adt
.variants
[0].fields
.iter().enumerate().map(|(i
, f
)| {
888 let field
= Field
::new(i
);
889 let field_ty
= f
.ty(self.tcx(), substs
);
890 Operand
::Move(self.place
.clone().field(field
, field_ty
))
893 let call
= TerminatorKind
::Call
{
894 func
: Operand
::function_handle(tcx
, free_func
, substs
, self.source_info
.span
),
896 destination
: Some((unit_temp
, target
)),
899 let free_block
= self.new_block(unwind
, call
);
901 let block_start
= Location { block: free_block, statement_index: 0 }
;
902 self.elaborator
.clear_drop_flag(block_start
, self.path
, DropFlagMode
::Shallow
);
906 fn drop_block
<'a
>(&mut self, target
: BasicBlock
, unwind
: Unwind
) -> BasicBlock
{
907 let block
= TerminatorKind
::Drop
{
908 location
: self.place
.clone(),
910 unwind
: unwind
.into_option()
912 self.new_block(unwind
, block
)
915 fn drop_flag_test_block(&mut self,
917 on_unset
: BasicBlock
,
921 let style
= self.elaborator
.drop_style(self.path
, DropFlagMode
::Shallow
);
922 debug
!("drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
923 self, on_set
, on_unset
, unwind
, style
);
926 DropStyle
::Dead
=> on_unset
,
927 DropStyle
::Static
=> on_set
,
928 DropStyle
::Conditional
| DropStyle
::Open
=> {
929 let flag
= self.elaborator
.get_drop_flag(self.path
).unwrap();
930 let term
= TerminatorKind
::if_(self.tcx(), flag
, on_set
, on_unset
);
931 self.new_block(unwind
, term
)
936 fn new_block
<'a
>(&mut self,
938 k
: TerminatorKind
<'tcx
>)
941 self.elaborator
.patch().new_block(BasicBlockData
{
943 terminator
: Some(Terminator
{
944 source_info
: self.source_info
, kind
: k
946 is_cleanup
: unwind
.is_cleanup()
950 fn new_temp(&mut self, ty
: Ty
<'tcx
>) -> Local
{
951 self.elaborator
.patch().new_temp(ty
, self.source_info
.span
)
954 fn terminator_loc(&mut self, bb
: BasicBlock
) -> Location
{
955 let mir
= self.elaborator
.mir();
956 self.elaborator
.patch().terminator_loc(mir
, bb
)
959 fn constant_usize(&self, val
: u16) -> Operand
<'tcx
> {
960 Operand
::Constant(box Constant
{
961 span
: self.source_info
.span
,
962 ty
: self.tcx().types
.usize,
963 literal
: Literal
::Value
{
964 value
: ty
::Const
::from_usize(self.tcx(), val
.into())
// Build an assign statement `lhs = rhs` at this drop's source location.
// (The tail of this definition lies beyond this chunk.)
969 fn assign(&self, lhs
: &Place
<'tcx
>, rhs
: Rvalue
<'tcx
>) -> Statement
<'tcx
> {
971 source_info
: self.source_info
,
972 kind
: StatementKind
::Assign(lhs
.clone(), rhs
)