]> git.proxmox.com Git - rustc.git/blob - compiler/rustc_mir/src/dataflow/impls/borrows.rs
New upstream version 1.52.0~beta.3+dfsg1
[rustc.git] / compiler / rustc_mir / src / dataflow / impls / borrows.rs
1 use rustc_middle::mir::{self, Body, Location, Place};
2 use rustc_middle::ty::RegionVid;
3 use rustc_middle::ty::TyCtxt;
4
5 use rustc_data_structures::fx::FxHashMap;
6 use rustc_index::bit_set::BitSet;
7
8 use crate::borrow_check::{
9 places_conflict, BorrowSet, PlaceConflictBias, PlaceExt, RegionInferenceContext, ToRegionVid,
10 };
11 use crate::dataflow::{self, fmt::DebugWithContext, GenKill};
12
13 use std::fmt;
14
// A `BorrowIndex` uniquely identifies one borrow (`Rvalue::Ref`) in the MIR
// body; it is used as the bit index for that borrow in the dataflow
// bitvectors, and Debug-prints as `bw<n>` per `DEBUG_FORMAT` below.
rustc_index::newtype_index! {
    pub struct BorrowIndex {
        DEBUG_FORMAT = "bw{}"
    }
}
20
/// `Borrows` stores the data used in the analyses that track the flow
/// of borrows.
///
/// It uniquely identifies every borrow (`Rvalue::Ref`) by a
/// `BorrowIndex`, and maps each such index to a `BorrowData`
/// describing the borrow. These indexes are used for representing the
/// borrows in compact bitvectors.
pub struct Borrows<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a Body<'tcx>,

    /// All borrows in `body`, indexed by `BorrowIndex`.
    borrow_set: &'a BorrowSet<'tcx>,
    /// For each location, the borrows that go out of scope on entry to it
    /// (precomputed once by `OutOfScopePrecomputer` when `Borrows` is built).
    borrows_out_of_scope_at_location: FxHashMap<Location, Vec<BorrowIndex>>,
}
35
/// One pending unit of work for `OutOfScopePrecomputer`'s DFS: a basic block
/// together with the inclusive range `lo..=hi` of statement indices within it
/// that still need to be examined.
struct StackEntry {
    bb: mir::BasicBlock,
    lo: usize,
    hi: usize,
}
41
/// Walks the CFG forward from each borrow's reserve location to find the
/// locations where the borrow's region has ended, accumulating the result in
/// `borrows_out_of_scope_at_location` (which `Borrows::new` then takes over).
struct OutOfScopePrecomputer<'a, 'tcx> {
    /// Blocks already scheduled or processed for the *current* borrow;
    /// cleared at the end of each `precompute_borrows_out_of_scope` call.
    visited: BitSet<mir::BasicBlock>,
    /// DFS worklist of (block, statement-range) entries.
    visit_stack: Vec<StackEntry>,
    body: &'a Body<'tcx>,
    regioncx: &'a RegionInferenceContext<'tcx>,
    /// The accumulated result: location -> borrows that go out of scope there.
    borrows_out_of_scope_at_location: FxHashMap<Location, Vec<BorrowIndex>>,
}
49
50 impl<'a, 'tcx> OutOfScopePrecomputer<'a, 'tcx> {
51 fn new(body: &'a Body<'tcx>, regioncx: &'a RegionInferenceContext<'tcx>) -> Self {
52 OutOfScopePrecomputer {
53 visited: BitSet::new_empty(body.basic_blocks().len()),
54 visit_stack: vec![],
55 body,
56 regioncx,
57 borrows_out_of_scope_at_location: FxHashMap::default(),
58 }
59 }
60 }
61
62 impl<'tcx> OutOfScopePrecomputer<'_, 'tcx> {
63 fn precompute_borrows_out_of_scope(
64 &mut self,
65 borrow_index: BorrowIndex,
66 borrow_region: RegionVid,
67 location: Location,
68 ) {
69 // We visit one BB at a time. The complication is that we may start in the
70 // middle of the first BB visited (the one containing `location`), in which
71 // case we may have to later on process the first part of that BB if there
72 // is a path back to its start.
73
74 // For visited BBs, we record the index of the first statement processed.
75 // (In fully processed BBs this index is 0.) Note also that we add BBs to
76 // `visited` once they are added to `stack`, before they are actually
77 // processed, because this avoids the need to look them up again on
78 // completion.
79 self.visited.insert(location.block);
80
81 let mut first_lo = location.statement_index;
82 let first_hi = self.body[location.block].statements.len();
83
84 self.visit_stack.push(StackEntry { bb: location.block, lo: first_lo, hi: first_hi });
85
86 while let Some(StackEntry { bb, lo, hi }) = self.visit_stack.pop() {
87 // If we process the first part of the first basic block (i.e. we encounter that block
88 // for the second time), we no longer have to visit its successors again.
89 let mut finished_early = bb == location.block && hi != first_hi;
90 for i in lo..=hi {
91 let location = Location { block: bb, statement_index: i };
92 // If region does not contain a point at the location, then add to list and skip
93 // successor locations.
94 if !self.regioncx.region_contains(borrow_region, location) {
95 debug!("borrow {:?} gets killed at {:?}", borrow_index, location);
96 self.borrows_out_of_scope_at_location
97 .entry(location)
98 .or_default()
99 .push(borrow_index);
100 finished_early = true;
101 break;
102 }
103 }
104
105 if !finished_early {
106 // Add successor BBs to the work list, if necessary.
107 let bb_data = &self.body[bb];
108 debug_assert!(hi == bb_data.statements.len());
109 for &succ_bb in bb_data.terminator().successors() {
110 if self.visited.insert(succ_bb) == false {
111 if succ_bb == location.block && first_lo > 0 {
112 // `succ_bb` has been seen before. If it wasn't
113 // fully processed, add its first part to `stack`
114 // for processing.
115 self.visit_stack.push(StackEntry {
116 bb: succ_bb,
117 lo: 0,
118 hi: first_lo - 1,
119 });
120
121 // And update this entry with 0, to represent the
122 // whole BB being processed.
123 first_lo = 0;
124 }
125 } else {
126 // succ_bb hasn't been seen before. Add it to
127 // `stack` for processing.
128 self.visit_stack.push(StackEntry {
129 bb: succ_bb,
130 lo: 0,
131 hi: self.body[succ_bb].statements.len(),
132 });
133 }
134 }
135 }
136 }
137
138 self.visited.clear();
139 }
140 }
141
142 impl<'a, 'tcx> Borrows<'a, 'tcx> {
143 crate fn new(
144 tcx: TyCtxt<'tcx>,
145 body: &'a Body<'tcx>,
146 nonlexical_regioncx: &'a RegionInferenceContext<'tcx>,
147 borrow_set: &'a BorrowSet<'tcx>,
148 ) -> Self {
149 let mut prec = OutOfScopePrecomputer::new(body, nonlexical_regioncx);
150 for (borrow_index, borrow_data) in borrow_set.iter_enumerated() {
151 let borrow_region = borrow_data.region.to_region_vid();
152 let location = borrow_data.reserve_location;
153
154 prec.precompute_borrows_out_of_scope(borrow_index, borrow_region, location);
155 }
156
157 Borrows {
158 tcx,
159 body,
160 borrow_set,
161 borrows_out_of_scope_at_location: prec.borrows_out_of_scope_at_location,
162 }
163 }
164
165 pub fn location(&self, idx: BorrowIndex) -> &Location {
166 &self.borrow_set[idx].reserve_location
167 }
168
169 /// Add all borrows to the kill set, if those borrows are out of scope at `location`.
170 /// That means they went out of a nonlexical scope
171 fn kill_loans_out_of_scope_at_location(
172 &self,
173 trans: &mut impl GenKill<BorrowIndex>,
174 location: Location,
175 ) {
176 // NOTE: The state associated with a given `location`
177 // reflects the dataflow on entry to the statement.
178 // Iterate over each of the borrows that we've precomputed
179 // to have went out of scope at this location and kill them.
180 //
181 // We are careful always to call this function *before* we
182 // set up the gen-bits for the statement or
183 // terminator. That way, if the effect of the statement or
184 // terminator *does* introduce a new loan of the same
185 // region, then setting that gen-bit will override any
186 // potential kill introduced here.
187 if let Some(indices) = self.borrows_out_of_scope_at_location.get(&location) {
188 trans.kill_all(indices.iter().copied());
189 }
190 }
191
192 /// Kill any borrows that conflict with `place`.
193 fn kill_borrows_on_place(&self, trans: &mut impl GenKill<BorrowIndex>, place: Place<'tcx>) {
194 debug!("kill_borrows_on_place: place={:?}", place);
195
196 let other_borrows_of_local = self
197 .borrow_set
198 .local_map
199 .get(&place.local)
200 .into_iter()
201 .flat_map(|bs| bs.iter())
202 .copied();
203
204 // If the borrowed place is a local with no projections, all other borrows of this
205 // local must conflict. This is purely an optimization so we don't have to call
206 // `places_conflict` for every borrow.
207 if place.projection.is_empty() {
208 if !self.body.local_decls[place.local].is_ref_to_static() {
209 trans.kill_all(other_borrows_of_local);
210 }
211 return;
212 }
213
214 // By passing `PlaceConflictBias::NoOverlap`, we conservatively assume that any given
215 // pair of array indices are unequal, so that when `places_conflict` returns true, we
216 // will be assured that two places being compared definitely denotes the same sets of
217 // locations.
218 let definitely_conflicting_borrows = other_borrows_of_local.filter(|&i| {
219 places_conflict(
220 self.tcx,
221 self.body,
222 self.borrow_set[i].borrowed_place,
223 place,
224 PlaceConflictBias::NoOverlap,
225 )
226 });
227
228 trans.kill_all(definitely_conflicting_borrows);
229 }
230 }
231
232 impl<'tcx> dataflow::AnalysisDomain<'tcx> for Borrows<'_, 'tcx> {
233 type Domain = BitSet<BorrowIndex>;
234
235 const NAME: &'static str = "borrows";
236
237 fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
238 // bottom = nothing is reserved or activated yet;
239 BitSet::new_empty(self.borrow_set.len() * 2)
240 }
241
242 fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) {
243 // no borrows of code region_scopes have been taken prior to
244 // function execution, so this method has no effect.
245 }
246 }
247
248 impl<'tcx> dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
249 type Idx = BorrowIndex;
250
251 fn before_statement_effect(
252 &self,
253 trans: &mut impl GenKill<Self::Idx>,
254 _statement: &mir::Statement<'tcx>,
255 location: Location,
256 ) {
257 self.kill_loans_out_of_scope_at_location(trans, location);
258 }
259
260 fn statement_effect(
261 &self,
262 trans: &mut impl GenKill<Self::Idx>,
263 stmt: &mir::Statement<'tcx>,
264 location: Location,
265 ) {
266 match stmt.kind {
267 mir::StatementKind::Assign(box (lhs, ref rhs)) => {
268 if let mir::Rvalue::Ref(_, _, place) = *rhs {
269 if place.ignore_borrow(
270 self.tcx,
271 self.body,
272 &self.borrow_set.locals_state_at_exit,
273 ) {
274 return;
275 }
276 let index = self.borrow_set.get_index_of(&location).unwrap_or_else(|| {
277 panic!("could not find BorrowIndex for location {:?}", location);
278 });
279
280 trans.gen(index);
281 }
282
283 // Make sure there are no remaining borrows for variables
284 // that are assigned over.
285 self.kill_borrows_on_place(trans, lhs);
286 }
287
288 mir::StatementKind::StorageDead(local) => {
289 // Make sure there are no remaining borrows for locals that
290 // are gone out of scope.
291 self.kill_borrows_on_place(trans, Place::from(local));
292 }
293
294 mir::StatementKind::LlvmInlineAsm(ref asm) => {
295 for (output, kind) in asm.outputs.iter().zip(&asm.asm.outputs) {
296 if !kind.is_indirect && !kind.is_rw {
297 self.kill_borrows_on_place(trans, *output);
298 }
299 }
300 }
301
302 mir::StatementKind::FakeRead(..)
303 | mir::StatementKind::SetDiscriminant { .. }
304 | mir::StatementKind::StorageLive(..)
305 | mir::StatementKind::Retag { .. }
306 | mir::StatementKind::AscribeUserType(..)
307 | mir::StatementKind::Coverage(..)
308 | mir::StatementKind::CopyNonOverlapping(..)
309 | mir::StatementKind::Nop => {}
310 }
311 }
312
313 fn before_terminator_effect(
314 &self,
315 trans: &mut impl GenKill<Self::Idx>,
316 _terminator: &mir::Terminator<'tcx>,
317 location: Location,
318 ) {
319 self.kill_loans_out_of_scope_at_location(trans, location);
320 }
321
322 fn terminator_effect(
323 &self,
324 trans: &mut impl GenKill<Self::Idx>,
325 teminator: &mir::Terminator<'tcx>,
326 _location: Location,
327 ) {
328 if let mir::TerminatorKind::InlineAsm { operands, .. } = &teminator.kind {
329 for op in operands {
330 if let mir::InlineAsmOperand::Out { place: Some(place), .. }
331 | mir::InlineAsmOperand::InOut { out_place: Some(place), .. } = *op
332 {
333 self.kill_borrows_on_place(trans, place);
334 }
335 }
336 }
337 }
338
339 fn call_return_effect(
340 &self,
341 _trans: &mut impl GenKill<Self::Idx>,
342 _block: mir::BasicBlock,
343 _func: &mir::Operand<'tcx>,
344 _args: &[mir::Operand<'tcx>],
345 _dest_place: mir::Place<'tcx>,
346 ) {
347 }
348 }
349
350 impl DebugWithContext<Borrows<'_, '_>> for BorrowIndex {
351 fn fmt_with(&self, ctxt: &Borrows<'_, '_>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
352 write!(f, "{:?}", ctxt.location(*self))
353 }
354 }