use rustc_middle::mir::{self, Body, Location, Place};
use rustc_middle::ty::RegionVid;
use rustc_middle::ty::TyCtxt;

use rustc_data_structures::fx::FxHashMap;
use rustc_index::bit_set::BitSet;

use crate::borrow_check::{
    places_conflict, BorrowSet, PlaceConflictBias, PlaceExt, RegionInferenceContext, ToRegionVid,
};
use crate::dataflow::{self, fmt::DebugWithContext, GenKill};

use std::fmt;
15 rustc_index
::newtype_index
! {
16 pub struct BorrowIndex
{
21 /// `Borrows` stores the data used in the analyses that track the flow
24 /// It uniquely identifies every borrow (`Rvalue::Ref`) by a
25 /// `BorrowIndex`, and maps each such index to a `BorrowData`
26 /// describing the borrow. These indexes are used for representing the
27 /// borrows in compact bitvectors.
28 pub struct Borrows
<'a
, 'tcx
> {
32 borrow_set
: &'a BorrowSet
<'tcx
>,
33 borrows_out_of_scope_at_location
: FxHashMap
<Location
, Vec
<BorrowIndex
>>,
42 struct OutOfScopePrecomputer
<'a
, 'tcx
> {
43 visited
: BitSet
<mir
::BasicBlock
>,
44 visit_stack
: Vec
<StackEntry
>,
46 regioncx
: &'a RegionInferenceContext
<'tcx
>,
47 borrows_out_of_scope_at_location
: FxHashMap
<Location
, Vec
<BorrowIndex
>>,
50 impl<'a
, 'tcx
> OutOfScopePrecomputer
<'a
, 'tcx
> {
51 fn new(body
: &'a Body
<'tcx
>, regioncx
: &'a RegionInferenceContext
<'tcx
>) -> Self {
52 OutOfScopePrecomputer
{
53 visited
: BitSet
::new_empty(body
.basic_blocks().len()),
57 borrows_out_of_scope_at_location
: FxHashMap
::default(),
62 impl<'tcx
> OutOfScopePrecomputer
<'_
, 'tcx
> {
63 fn precompute_borrows_out_of_scope(
65 borrow_index
: BorrowIndex
,
66 borrow_region
: RegionVid
,
69 // We visit one BB at a time. The complication is that we may start in the
70 // middle of the first BB visited (the one containing `location`), in which
71 // case we may have to later on process the first part of that BB if there
72 // is a path back to its start.
74 // For visited BBs, we record the index of the first statement processed.
75 // (In fully processed BBs this index is 0.) Note also that we add BBs to
76 // `visited` once they are added to `stack`, before they are actually
77 // processed, because this avoids the need to look them up again on
79 self.visited
.insert(location
.block
);
81 let mut first_lo
= location
.statement_index
;
82 let first_hi
= self.body
[location
.block
].statements
.len();
84 self.visit_stack
.push(StackEntry { bb: location.block, lo: first_lo, hi: first_hi }
);
86 while let Some(StackEntry { bb, lo, hi }
) = self.visit_stack
.pop() {
87 // If we process the first part of the first basic block (i.e. we encounter that block
88 // for the second time), we no longer have to visit its successors again.
89 let mut finished_early
= bb
== location
.block
&& hi
!= first_hi
;
91 let location
= Location { block: bb, statement_index: i }
;
92 // If region does not contain a point at the location, then add to list and skip
93 // successor locations.
94 if !self.regioncx
.region_contains(borrow_region
, location
) {
95 debug
!("borrow {:?} gets killed at {:?}", borrow_index
, location
);
96 self.borrows_out_of_scope_at_location
100 finished_early
= true;
106 // Add successor BBs to the work list, if necessary.
107 let bb_data
= &self.body
[bb
];
108 debug_assert
!(hi
== bb_data
.statements
.len());
109 for &succ_bb
in bb_data
.terminator().successors() {
110 if self.visited
.insert(succ_bb
) == false {
111 if succ_bb
== location
.block
&& first_lo
> 0 {
112 // `succ_bb` has been seen before. If it wasn't
113 // fully processed, add its first part to `stack`
115 self.visit_stack
.push(StackEntry
{
121 // And update this entry with 0, to represent the
122 // whole BB being processed.
126 // succ_bb hasn't been seen before. Add it to
127 // `stack` for processing.
128 self.visit_stack
.push(StackEntry
{
131 hi
: self.body
[succ_bb
].statements
.len(),
138 self.visited
.clear();
142 impl<'a
, 'tcx
> Borrows
<'a
, 'tcx
> {
145 body
: &'a Body
<'tcx
>,
146 nonlexical_regioncx
: &'a RegionInferenceContext
<'tcx
>,
147 borrow_set
: &'a BorrowSet
<'tcx
>,
149 let mut prec
= OutOfScopePrecomputer
::new(body
, nonlexical_regioncx
);
150 for (borrow_index
, borrow_data
) in borrow_set
.iter_enumerated() {
151 let borrow_region
= borrow_data
.region
.to_region_vid();
152 let location
= borrow_data
.reserve_location
;
154 prec
.precompute_borrows_out_of_scope(borrow_index
, borrow_region
, location
);
161 borrows_out_of_scope_at_location
: prec
.borrows_out_of_scope_at_location
,
165 pub fn location(&self, idx
: BorrowIndex
) -> &Location
{
166 &self.borrow_set
[idx
].reserve_location
169 /// Add all borrows to the kill set, if those borrows are out of scope at `location`.
170 /// That means they went out of a nonlexical scope
171 fn kill_loans_out_of_scope_at_location(
173 trans
: &mut impl GenKill
<BorrowIndex
>,
176 // NOTE: The state associated with a given `location`
177 // reflects the dataflow on entry to the statement.
178 // Iterate over each of the borrows that we've precomputed
179 // to have went out of scope at this location and kill them.
181 // We are careful always to call this function *before* we
182 // set up the gen-bits for the statement or
183 // terminator. That way, if the effect of the statement or
184 // terminator *does* introduce a new loan of the same
185 // region, then setting that gen-bit will override any
186 // potential kill introduced here.
187 if let Some(indices
) = self.borrows_out_of_scope_at_location
.get(&location
) {
188 trans
.kill_all(indices
.iter().copied());
192 /// Kill any borrows that conflict with `place`.
193 fn kill_borrows_on_place(&self, trans
: &mut impl GenKill
<BorrowIndex
>, place
: Place
<'tcx
>) {
194 debug
!("kill_borrows_on_place: place={:?}", place
);
196 let other_borrows_of_local
= self
201 .flat_map(|bs
| bs
.iter())
204 // If the borrowed place is a local with no projections, all other borrows of this
205 // local must conflict. This is purely an optimization so we don't have to call
206 // `places_conflict` for every borrow.
207 if place
.projection
.is_empty() {
208 if !self.body
.local_decls
[place
.local
].is_ref_to_static() {
209 trans
.kill_all(other_borrows_of_local
);
214 // By passing `PlaceConflictBias::NoOverlap`, we conservatively assume that any given
215 // pair of array indices are unequal, so that when `places_conflict` returns true, we
216 // will be assured that two places being compared definitely denotes the same sets of
218 let definitely_conflicting_borrows
= other_borrows_of_local
.filter(|&i
| {
222 self.borrow_set
[i
].borrowed_place
,
224 PlaceConflictBias
::NoOverlap
,
228 trans
.kill_all(definitely_conflicting_borrows
);
232 impl<'tcx
> dataflow
::AnalysisDomain
<'tcx
> for Borrows
<'_
, 'tcx
> {
233 type Domain
= BitSet
<BorrowIndex
>;
235 const NAME
: &'
static str = "borrows";
237 fn bottom_value(&self, _
: &mir
::Body
<'tcx
>) -> Self::Domain
{
238 // bottom = nothing is reserved or activated yet;
239 BitSet
::new_empty(self.borrow_set
.len() * 2)
242 fn initialize_start_block(&self, _
: &mir
::Body
<'tcx
>, _
: &mut Self::Domain
) {
243 // no borrows of code region_scopes have been taken prior to
244 // function execution, so this method has no effect.
248 impl<'tcx
> dataflow
::GenKillAnalysis
<'tcx
> for Borrows
<'_
, 'tcx
> {
249 type Idx
= BorrowIndex
;
251 fn before_statement_effect(
253 trans
: &mut impl GenKill
<Self::Idx
>,
254 _statement
: &mir
::Statement
<'tcx
>,
257 self.kill_loans_out_of_scope_at_location(trans
, location
);
262 trans
: &mut impl GenKill
<Self::Idx
>,
263 stmt
: &mir
::Statement
<'tcx
>,
267 mir
::StatementKind
::Assign(box (lhs
, ref rhs
)) => {
268 if let mir
::Rvalue
::Ref(_
, _
, place
) = *rhs
{
269 if place
.ignore_borrow(
272 &self.borrow_set
.locals_state_at_exit
,
276 let index
= self.borrow_set
.get_index_of(&location
).unwrap_or_else(|| {
277 panic
!("could not find BorrowIndex for location {:?}", location
);
283 // Make sure there are no remaining borrows for variables
284 // that are assigned over.
285 self.kill_borrows_on_place(trans
, lhs
);
288 mir
::StatementKind
::StorageDead(local
) => {
289 // Make sure there are no remaining borrows for locals that
290 // are gone out of scope.
291 self.kill_borrows_on_place(trans
, Place
::from(local
));
294 mir
::StatementKind
::LlvmInlineAsm(ref asm
) => {
295 for (output
, kind
) in asm
.outputs
.iter().zip(&asm
.asm
.outputs
) {
296 if !kind
.is_indirect
&& !kind
.is_rw
{
297 self.kill_borrows_on_place(trans
, *output
);
302 mir
::StatementKind
::FakeRead(..)
303 | mir
::StatementKind
::SetDiscriminant { .. }
304 | mir
::StatementKind
::StorageLive(..)
305 | mir
::StatementKind
::Retag { .. }
306 | mir
::StatementKind
::AscribeUserType(..)
307 | mir
::StatementKind
::Coverage(..)
308 | mir
::StatementKind
::CopyNonOverlapping(..)
309 | mir
::StatementKind
::Nop
=> {}
313 fn before_terminator_effect(
315 trans
: &mut impl GenKill
<Self::Idx
>,
316 _terminator
: &mir
::Terminator
<'tcx
>,
319 self.kill_loans_out_of_scope_at_location(trans
, location
);
322 fn terminator_effect(
324 trans
: &mut impl GenKill
<Self::Idx
>,
325 teminator
: &mir
::Terminator
<'tcx
>,
328 if let mir
::TerminatorKind
::InlineAsm { operands, .. }
= &teminator
.kind
{
330 if let mir
::InlineAsmOperand
::Out { place: Some(place), .. }
331 | mir
::InlineAsmOperand
::InOut { out_place: Some(place), .. }
= *op
333 self.kill_borrows_on_place(trans
, place
);
339 fn call_return_effect(
341 _trans
: &mut impl GenKill
<Self::Idx
>,
342 _block
: mir
::BasicBlock
,
343 _func
: &mir
::Operand
<'tcx
>,
344 _args
: &[mir
::Operand
<'tcx
>],
345 _dest_place
: mir
::Place
<'tcx
>,
350 impl DebugWithContext
<Borrows
<'_
, '_
>> for BorrowIndex
{
351 fn fmt_with(&self, ctxt
: &Borrows
<'_
, '_
>, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
352 write
!(f
, "{:?}", ctxt
.location(*self))