]>
Commit | Line | Data |
---|---|---|
abe05a73 XL |
1 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT |
2 | // file at the top-level directory of this distribution and at | |
3 | // http://rust-lang.org/COPYRIGHT. | |
4 | // | |
5 | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or | |
6 | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license | |
7 | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your | |
8 | // option. This file may not be copied, modified, or distributed | |
9 | // except according to those terms. | |
10 | ||
11 | //! The code to do lexical region resolution. | |
12 | ||
13 | use infer::SubregionOrigin; | |
14 | use infer::RegionVariableOrigin; | |
15 | use infer::region_constraints::Constraint; | |
16 | use infer::region_constraints::GenericKind; | |
17 | use infer::region_constraints::RegionConstraintData; | |
83c7162d | 18 | use infer::region_constraints::VarInfos; |
abe05a73 XL |
19 | use infer::region_constraints::VerifyBound; |
20 | use middle::free_region::RegionRelations; | |
21 | use rustc_data_structures::indexed_vec::{Idx, IndexVec}; | |
22 | use rustc_data_structures::fx::FxHashSet; | |
8faf50e0 | 23 | use rustc_data_structures::graph::implementation::{Graph, Direction, NodeIndex, INCOMING, OUTGOING}; |
abe05a73 XL |
24 | use std::fmt; |
25 | use std::u32; | |
26 | use ty::{self, TyCtxt}; | |
27 | use ty::{Region, RegionVid}; | |
28 | use ty::{ReEarlyBound, ReEmpty, ReErased, ReFree, ReStatic}; | |
29 | use ty::{ReLateBound, ReScope, ReSkolemized, ReVar}; | |
30 | ||
31 | mod graphviz; | |
32 | ||
/// This function performs lexical region resolution given a complete
/// set of constraints and variable origins. It performs a fixed-point
/// iteration to find region values which satisfy all constraints,
/// assuming such values can be found. It returns the final values of
/// all the variables as well as a set of errors that must be reported.
///
/// - `region_rels`: relations between free regions and the scope tree.
/// - `var_infos`: origin information for each region variable.
/// - `data`: the accumulated constraints, verifys and givens.
pub fn resolve<'tcx>(
    region_rels: &RegionRelations<'_, '_, 'tcx>,
    var_infos: VarInfos,
    data: RegionConstraintData<'tcx>,
) -> (
    LexicalRegionResolutions<'tcx>,
    Vec<RegionResolutionError<'tcx>>,
) {
    debug!("RegionConstraintData: resolve_regions()");
    let mut errors = vec![];
    // The resolver takes ownership of the constraint data for the
    // duration of inference.
    let mut resolver = LexicalResolver {
        region_rels,
        var_infos,
        data,
    };
    let values = resolver.infer_variable_values(&mut errors);
    (values, errors)
}
56 | ||
/// Contains the result of lexical region resolution. Offers methods
/// to look up the final value of a region variable.
pub struct LexicalRegionResolutions<'tcx> {
    /// Final value for each region variable, indexed by `RegionVid`.
    values: IndexVec<RegionVid, VarValue<'tcx>>,
    /// Region returned by `resolve_var` for variables whose value is
    /// `ErrorValue` (set to `'static` in `construct_var_data`).
    error_region: ty::Region<'tcx>,
}
63 | ||
/// The value assigned to a region variable during inference.
#[derive(Copy, Clone, Debug)]
enum VarValue<'tcx> {
    /// The variable's current region value (grown during expansion).
    Value(Region<'tcx>),
    /// Inference failed for this variable; reads of it resolve to the
    /// `error_region` and an error is reported in `collect_var_errors`.
    ErrorValue,
}
69 | ||
/// The kinds of errors lexical region resolution can produce.
#[derive(Clone, Debug)]
pub enum RegionResolutionError<'tcx> {
    /// `ConcreteFailure(o, a, b)`:
    ///
    /// `o` requires that `a <= b`, but this does not hold
    ConcreteFailure(SubregionOrigin<'tcx>, Region<'tcx>, Region<'tcx>),

    /// `GenericBoundFailure(p, s, a)`
    ///
    /// The parameter/associated-type `p` must be known to outlive the lifetime
    /// `a` (but none of the known bounds are sufficient).
    GenericBoundFailure(SubregionOrigin<'tcx>, GenericKind<'tcx>, Region<'tcx>),

    /// `SubSupConflict(v, sub_origin, sub_r, sup_origin, sup_r)`:
    ///
    /// Could not infer a value for `v` because `sub_r <= v` (due to
    /// `sub_origin`) but `v <= sup_r` (due to `sup_origin`) and
    /// `sub_r <= sup_r` does not hold.
    SubSupConflict(
        RegionVariableOrigin,
        SubregionOrigin<'tcx>,
        Region<'tcx>,
        SubregionOrigin<'tcx>,
        Region<'tcx>,
    ),
}
96 | ||
/// A concrete region paired with the origin of the constraint that
/// introduced it; collected as lower/upper bounds when reporting
/// `SubSupConflict` errors.
struct RegionAndOrigin<'tcx> {
    region: Region<'tcx>,
    origin: SubregionOrigin<'tcx>,
}
101 | ||
/// Constraint graph: one node per region variable (plus two dummy
/// nodes for constant regions, see `construct_graph`), with edges
/// labeled by the constraint that created them.
type RegionGraph<'tcx> = Graph<(), Constraint<'tcx>>;
abe05a73 XL |
103 | |
/// Working state for lexical region resolution; built by `resolve`
/// and consumed by `infer_variable_values`.
struct LexicalResolver<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
    region_rels: &'cx RegionRelations<'cx, 'gcx, 'tcx>,
    /// Origin of each region variable (indexable by `RegionVid`).
    var_infos: VarInfos,
    /// Constraints, verifys and givens accumulated during inference.
    data: RegionConstraintData<'tcx>,
}
109 | ||
110 | impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> { | |
    /// Main driver: computes a value for every region variable via
    /// fixed-point expansion, then checks the remaining constraints
    /// and pushes any failures into `errors`.
    fn infer_variable_values(
        &mut self,
        errors: &mut Vec<RegionResolutionError<'tcx>>,
    ) -> LexicalRegionResolutions<'tcx> {
        let mut var_data = self.construct_var_data(self.region_rels.tcx);

        // Dorky hack to cause `dump_constraints` to only get called
        // if debug mode is enabled:
        debug!(
            "----() End constraint listing (context={:?}) {:?}---",
            self.region_rels.context,
            self.dump_constraints(self.region_rels)
        );
        graphviz::maybe_print_constraints_for(&self.data, self.region_rels);

        let graph = self.construct_graph();
        self.expand_givens(&graph);
        self.expansion(&mut var_data);
        self.collect_errors(&mut var_data, errors);
        self.collect_var_errors(&var_data, &graph, errors);
        var_data
    }
133 | ||
    /// Number of region variables being inferred.
    fn num_vars(&self) -> usize {
        self.var_infos.len()
    }
137 | ||
    /// Initially, the value for all variables is set to `'empty`, the
    /// empty region. The `expansion` phase will grow this larger.
    /// Error reads are mapped to `'static` (the `error_region`).
    fn construct_var_data(&self, tcx: TyCtxt<'_, '_, 'tcx>) -> LexicalRegionResolutions<'tcx> {
        LexicalRegionResolutions {
            error_region: tcx.types.re_static,
            values: (0..self.num_vars())
                .map(|_| VarValue::Value(tcx.types.re_empty))
                .collect(),
        }
    }
148 | ||
    /// Logs every constraint; invoked for its side effect from inside
    /// a `debug!` in `infer_variable_values` so it only runs when
    /// debug logging is enabled.
    fn dump_constraints(&self, free_regions: &RegionRelations<'_, '_, 'tcx>) {
        debug!(
            "----() Start constraint listing (context={:?}) ()----",
            free_regions.context
        );
        for (idx, (constraint, _)) in self.data.constraints.iter().enumerate() {
            debug!("Constraint {} => {:?}", idx, constraint);
        }
    }
158 | ||
    /// Closes the `givens` set under reachability in the constraint
    /// graph before expansion begins.
    fn expand_givens(&mut self, graph: &RegionGraph) {
        // Givens are a kind of horrible hack to account for
        // constraints like 'c <= '0 that are known to hold due to
        // closure signatures (see the comment above on the `givens`
        // field). They should go away. But until they do, the role
        // of this fn is to account for the transitive nature:
        //
        //     Given 'c <= '0
        //     and   '0 <= '1
        //     then  'c <= '1

        // Snapshot the seeds first: we mutate `givens` inside the loop.
        let seeds: Vec<_> = self.data.givens.iter().cloned().collect();
        for (r, vid) in seeds {
            // While all things transitively reachable in the graph
            // from the variable (`'0` in the example above).
            let seed_index = NodeIndex(vid.index() as usize);
            for succ_index in graph.depth_traverse(seed_index, OUTGOING) {
                let succ_index = succ_index.0;

                // The first N nodes correspond to the region
                // variables. Other nodes correspond to constant
                // regions.
                if succ_index < self.num_vars() {
                    let succ_vid = RegionVid::new(succ_index);

                    // Add `'c <= '1`.
                    self.data.givens.insert((r, succ_vid));
                }
            }
        }
    }
190 | ||
    /// Fixed-point loop that grows each variable's value until every
    /// lower-bound constraint (`R <= V`, `V1 <= V2`) is satisfied.
    fn expansion(&self, var_values: &mut LexicalRegionResolutions<'tcx>) {
        self.iterate_until_fixed_point("Expansion", |constraint, origin| {
            debug!("expansion: constraint={:?} origin={:?}", constraint, origin);
            match *constraint {
                Constraint::RegSubVar(a_region, b_vid) => {
                    // Concrete lower bound: grow `b_vid` to cover it.
                    let b_data = var_values.value_mut(b_vid);
                    self.expand_node(a_region, b_vid, b_data)
                }
                Constraint::VarSubVar(a_vid, b_vid) => match *var_values.value(a_vid) {
                    VarValue::ErrorValue => false,
                    VarValue::Value(a_region) => {
                        // Propagate `a`'s current value into `b`.
                        let b_node = var_values.value_mut(b_vid);
                        self.expand_node(a_region, b_vid, b_node)
                    }
                },
                Constraint::RegSubReg(..) | Constraint::VarSubReg(..) => {
                    // These constraints are checked after expansion
                    // is done, in `collect_errors`.
                    false
                }
            }
        })
    }
214 | ||
    /// Grows the value of `b_vid` (whose current value is `b_data`)
    /// so that it covers `a_region`, by replacing it with the LUB of
    /// the two. Returns `true` iff the value actually changed.
    fn expand_node(
        &self,
        a_region: Region<'tcx>,
        b_vid: RegionVid,
        b_data: &mut VarValue<'tcx>,
    ) -> bool {
        debug!("expand_node({:?}, {:?} == {:?})", a_region, b_vid, b_data);

        // Check if this relationship is implied by a given.
        match *a_region {
            ty::ReEarlyBound(_) | ty::ReFree(_) => if self.data.givens.contains(&(a_region, b_vid))
            {
                debug!("given");
                return false;
            },
            _ => {}
        }

        match *b_data {
            VarValue::Value(cur_region) => {
                let lub = self.lub_concrete_regions(a_region, cur_region);
                if lub == cur_region {
                    // Already large enough; fixed point for this edge.
                    return false;
                }

                debug!(
                    "Expanding value of {:?} from {:?} to {:?}",
                    b_vid,
                    cur_region,
                    lub
                );

                *b_data = VarValue::Value(lub);
                return true;
            }

            VarValue::ErrorValue => {
                // Errors are sticky; nothing more to propagate.
                return false;
            }
        }
    }
256 | ||
257 | ||
    /// Computes the least upper bound of two *concrete* regions
    /// (`ReVar`s must already have been substituted away; passing one
    /// is a bug).
    fn lub_concrete_regions(&self, a: Region<'tcx>, b: Region<'tcx>) -> Region<'tcx> {
        let tcx = self.region_rels.tcx;
        match (a, b) {
            // These variants should never reach lexical resolution.
            (&ty::ReCanonical(..), _) |
            (_, &ty::ReCanonical(..)) |
            (&ty::ReClosureBound(..), _) |
            (_, &ty::ReClosureBound(..)) |
            (&ReLateBound(..), _) |
            (_, &ReLateBound(..)) |
            (&ReErased, _) |
            (_, &ReErased) => {
                bug!("cannot relate region: LUB({:?}, {:?})", a, b);
            }

            (r @ &ReStatic, _) | (_, r @ &ReStatic) => {
                r // nothing lives longer than static
            }

            (&ReEmpty, r) | (r, &ReEmpty) => {
                r // everything lives longer than empty
            }

            (&ReVar(v_id), _) | (_, &ReVar(v_id)) => {
                span_bug!(
                    self.var_infos[v_id].origin.span(),
                    "lub_concrete_regions invoked with non-concrete \
                     regions: {:?}, {:?}",
                    a,
                    b
                );
            }

            (&ReEarlyBound(_), &ReScope(s_id)) |
            (&ReScope(s_id), &ReEarlyBound(_)) |
            (&ReFree(_), &ReScope(s_id)) |
            (&ReScope(s_id), &ReFree(_)) => {
                // A "free" region can be interpreted as "some region
                // at least as big as fr.scope". So, we can
                // reasonably compare free regions and scopes:
                let fr_scope = match (a, b) {
                    (&ReEarlyBound(ref br), _) | (_, &ReEarlyBound(ref br)) => self.region_rels
                        .region_scope_tree
                        .early_free_scope(self.region_rels.tcx, br),
                    (&ReFree(ref fr), _) | (_, &ReFree(ref fr)) => self.region_rels
                        .region_scope_tree
                        .free_scope(self.region_rels.tcx, fr),
                    _ => bug!(),
                };
                let r_id = self.region_rels
                    .region_scope_tree
                    .nearest_common_ancestor(fr_scope, s_id);
                if r_id == fr_scope {
                    // if the free region's scope `fr.scope` is bigger than
                    // the scope region `s_id`, then the LUB is the free
                    // region itself:
                    match (a, b) {
                        (_, &ReScope(_)) => return a,
                        (&ReScope(_), _) => return b,
                        _ => bug!(),
                    }
                }

                // otherwise, we don't know what the free region is,
                // so we must conservatively say the LUB is static:
                tcx.types.re_static
            }

            (&ReScope(a_id), &ReScope(b_id)) => {
                // The region corresponding to an outer block is a
                // subtype of the region corresponding to an inner
                // block.
                let lub = self.region_rels
                    .region_scope_tree
                    .nearest_common_ancestor(a_id, b_id);
                tcx.mk_region(ReScope(lub))
            }

            (&ReEarlyBound(_), &ReEarlyBound(_)) |
            (&ReFree(_), &ReEarlyBound(_)) |
            (&ReEarlyBound(_), &ReFree(_)) |
            (&ReFree(_), &ReFree(_)) => self.region_rels.lub_free_regions(a, b),

            // For these types, we cannot define any additional
            // relationship:
            (&ReSkolemized(..), _) | (_, &ReSkolemized(..)) => if a == b {
                a
            } else {
                tcx.types.re_static
            },
        }
    }
349 | ||
    /// After expansion is complete, go and check upper bounds (i.e.,
    /// cases where the region cannot grow larger than a fixed point)
    /// and check that they are satisfied.
    fn collect_errors(
        &self,
        var_data: &mut LexicalRegionResolutions<'tcx>,
        errors: &mut Vec<RegionResolutionError<'tcx>>,
    ) {
        for (constraint, origin) in &self.data.constraints {
            debug!(
                "collect_errors: constraint={:?} origin={:?}",
                constraint,
                origin
            );
            match *constraint {
                Constraint::RegSubVar(..) | Constraint::VarSubVar(..) => {
                    // Expansion will ensure that these constraints hold. Ignore.
                }

                Constraint::RegSubReg(sub, sup) => {
                    if self.region_rels.is_subregion_of(sub, sup) {
                        continue;
                    }

                    debug!(
                        "collect_errors: region error at {:?}: \
                         cannot verify that {:?} <= {:?}",
                        origin,
                        sub,
                        sup
                    );

                    errors.push(RegionResolutionError::ConcreteFailure(
                        (*origin).clone(),
                        sub,
                        sup,
                    ));
                }

                Constraint::VarSubReg(a_vid, b_region) => {
                    let a_data = var_data.value_mut(a_vid);
                    // NOTE(review): the "contraction" label below is
                    // historical; this check now runs in `collect_errors`.
                    debug!("contraction: {:?} == {:?}, {:?}", a_vid, a_data, b_region);

                    let a_region = match *a_data {
                        VarValue::ErrorValue => continue,
                        VarValue::Value(a_region) => a_region,
                    };

                    // Do not report these errors immediately:
                    // instead, set the variable value to error and
                    // collect them later.
                    if !self.region_rels.is_subregion_of(a_region, b_region) {
                        debug!(
                            "collect_errors: region error at {:?}: \
                             cannot verify that {:?}={:?} <= {:?}",
                            origin,
                            a_vid,
                            a_region,
                            b_region
                        );
                        *a_data = VarValue::ErrorValue;
                    }
                }
            }
        }

        for verify in &self.data.verifys {
            debug!("collect_errors: verify={:?}", verify);
            let sub = var_data.normalize(verify.region);

            // This was an inference variable which didn't get
            // constrained, therefore it can be assumed to hold.
            if let ty::ReEmpty = *sub {
                continue;
            }

            if self.bound_is_met(&verify.bound, var_data, sub) {
                continue;
            }

            debug!(
                "collect_errors: region error at {:?}: \
                 cannot verify that {:?} <= {:?}",
                verify.origin,
                verify.region,
                verify.bound
            );

            errors.push(RegionResolutionError::GenericBoundFailure(
                verify.origin.clone(),
                verify.kind.clone(),
                sub,
            ));
        }
    }
445 | ||
    /// Go over the variables that were declared to be error variables
    /// and create a `RegionResolutionError` for each of them.
    fn collect_var_errors(
        &self,
        var_data: &LexicalRegionResolutions<'tcx>,
        graph: &RegionGraph<'tcx>,
        errors: &mut Vec<RegionResolutionError<'tcx>>,
    ) {
        debug!("collect_var_errors");

        // This is the best way that I have found to suppress
        // duplicate and related errors. Basically we keep a set of
        // flags for every node. Whenever an error occurs, we will
        // walk some portion of the graph looking to find pairs of
        // conflicting regions to report to the user. As we walk, we
        // trip the flags from false to true, and if we find that
        // we've already reported an error involving any particular
        // node we just stop and don't report the current error. The
        // idea is to report errors that derive from independent
        // regions of the graph, but not those that derive from
        // overlapping locations.
        //
        // `u32::MAX` marks "not yet visited by any error walk".
        let mut dup_vec = vec![u32::MAX; self.num_vars()];

        for (node_vid, value) in var_data.values.iter_enumerated() {
            match *value {
                VarValue::Value(_) => { /* Inference successful */ }
                VarValue::ErrorValue => {
                    /* Inference impossible, this value contains
                       inconsistent constraints.

                       I think that in this case we should report an
                       error now---unlike the case above, we can't
                       wait to see whether the user needs the result
                       of this variable. The reason is that the mere
                       existence of this variable implies that the
                       region graph is inconsistent, whether or not it
                       is used.

                       For example, we may have created a region
                       variable that is the GLB of two other regions
                       which do not have a GLB. Even if that variable
                       is not used, it implies that those two regions
                       *should* have a GLB.

                       At least I think this is true. It may be that
                       the mere existence of a conflict in a region variable
                       that is not used is not a problem, so if this rule
                       starts to create problems we'll have to revisit
                       this portion of the code and think hard about it. =) */
                    self.collect_error_for_expanding_node(graph, &mut dup_vec, node_vid, errors);
                }
            }
        }
    }
500 | ||
    /// Builds the constraint graph: one node per region variable plus
    /// two dummy nodes standing in for all constant regions, with one
    /// edge per constraint.
    fn construct_graph(&self) -> RegionGraph<'tcx> {
        let num_vars = self.num_vars();

        let mut graph = Graph::new();

        // Nodes 0..num_vars map 1:1 onto `RegionVid`s.
        for _ in 0..num_vars {
            graph.add_node(());
        }

        // Issue #30438: two distinct dummy nodes, one for incoming
        // edges (dummy_source) and another for outgoing edges
        // (dummy_sink). In `dummy -> a -> b -> dummy`, using one
        // dummy node leads one to think (erroneously) there exists a
        // path from `b` to `a`. Two dummy nodes sidesteps the issue.
        let dummy_source = graph.add_node(());
        let dummy_sink = graph.add_node(());

        for (constraint, _) in &self.data.constraints {
            match *constraint {
                Constraint::VarSubVar(a_id, b_id) => {
                    graph.add_edge(
                        NodeIndex(a_id.index() as usize),
                        NodeIndex(b_id.index() as usize),
                        *constraint,
                    );
                }
                Constraint::RegSubVar(_, b_id) => {
                    graph.add_edge(dummy_source, NodeIndex(b_id.index() as usize), *constraint);
                }
                Constraint::VarSubReg(a_id, _) => {
                    graph.add_edge(NodeIndex(a_id.index() as usize), dummy_sink, *constraint);
                }
                Constraint::RegSubReg(..) => {
                    // this would be an edge from `dummy_source` to
                    // `dummy_sink`; just ignore it.
                }
            }
        }

        return graph;
    }
542 | ||
    /// For an error variable `node_idx`, finds a concrete lower bound
    /// and upper bound that conflict and reports a `SubSupConflict`.
    /// Bails out silently if another walk already covered this node
    /// (via `dup_vec`), to avoid duplicate diagnostics.
    fn collect_error_for_expanding_node(
        &self,
        graph: &RegionGraph<'tcx>,
        dup_vec: &mut [u32],
        node_idx: RegionVid,
        errors: &mut Vec<RegionResolutionError<'tcx>>,
    ) {
        // Errors in expanding nodes result from a lower-bound that is
        // not contained by an upper-bound.
        let (mut lower_bounds, lower_dup) =
            self.collect_concrete_regions(graph, node_idx, INCOMING, dup_vec);
        let (mut upper_bounds, upper_dup) =
            self.collect_concrete_regions(graph, node_idx, OUTGOING, dup_vec);

        if lower_dup || upper_dup {
            return;
        }

        // We place free regions first because we are special casing
        // SubSupConflict(ReFree, ReFree) when reporting error, and so
        // the user will more likely get a specific suggestion.
        fn region_order_key(x: &RegionAndOrigin) -> u8 {
            match *x.region {
                ReEarlyBound(_) => 0,
                ReFree(_) => 1,
                _ => 2,
            }
        }
        lower_bounds.sort_by_key(region_order_key);
        upper_bounds.sort_by_key(region_order_key);

        for lower_bound in &lower_bounds {
            for upper_bound in &upper_bounds {
                if !self.region_rels
                    .is_subregion_of(lower_bound.region, upper_bound.region)
                {
                    let origin = self.var_infos[node_idx].origin.clone();
                    debug!(
                        "region inference error at {:?} for {:?}: SubSupConflict sub: {:?} \
                         sup: {:?}",
                        origin,
                        node_idx,
                        lower_bound.region,
                        upper_bound.region
                    );
                    errors.push(RegionResolutionError::SubSupConflict(
                        origin,
                        lower_bound.origin.clone(),
                        lower_bound.region,
                        upper_bound.origin.clone(),
                        upper_bound.region,
                    ));
                    return;
                }
            }
        }

        // The variable was marked as an error, so some conflicting
        // pair must exist; reaching here is a compiler bug.
        span_bug!(
            self.var_infos[node_idx].origin.span(),
            "collect_error_for_expanding_node() could not find \
             error for var {:?}, lower_bounds={:?}, \
             upper_bounds={:?}",
            node_idx,
            lower_bounds,
            upper_bounds
        );
    }
610 | ||
    /// Walks the constraint graph from `orig_node_idx` in direction
    /// `dir` (INCOMING = lower bounds, OUTGOING = upper bounds) and
    /// collects every concrete region encountered, together with the
    /// origin of the constraint that carried it. The boolean result
    /// is `true` if the walk touched a node already visited by a
    /// previous walk from a *different* start node (see `dup_vec`).
    fn collect_concrete_regions(
        &self,
        graph: &RegionGraph<'tcx>,
        orig_node_idx: RegionVid,
        dir: Direction,
        dup_vec: &mut [u32],
    ) -> (Vec<RegionAndOrigin<'tcx>>, bool) {
        // DFS bookkeeping for the walk.
        struct WalkState<'tcx> {
            set: FxHashSet<RegionVid>,
            stack: Vec<RegionVid>,
            result: Vec<RegionAndOrigin<'tcx>>,
            dup_found: bool,
        }
        let mut state = WalkState {
            set: FxHashSet(),
            stack: vec![orig_node_idx],
            result: Vec::new(),
            dup_found: false,
        };
        state.set.insert(orig_node_idx);

        // to start off the process, walk the source node in the
        // direction specified
        process_edges(&self.data, &mut state, graph, orig_node_idx, dir);

        while !state.stack.is_empty() {
            let node_idx = state.stack.pop().unwrap();

            // check whether we've visited this node on some previous walk
            if dup_vec[node_idx.index() as usize] == u32::MAX {
                dup_vec[node_idx.index() as usize] = orig_node_idx.index() as u32;
            } else if dup_vec[node_idx.index() as usize] != orig_node_idx.index() as u32 {
                state.dup_found = true;
            }

            debug!(
                "collect_concrete_regions(orig_node_idx={:?}, node_idx={:?})",
                orig_node_idx,
                node_idx
            );

            process_edges(&self.data, &mut state, graph, node_idx, dir);
        }

        let WalkState {
            result, dup_found, ..
        } = state;
        return (result, dup_found);

        // Pushes the neighbors of `source_vid` onto the stack and
        // records any concrete-region endpoints into `state.result`.
        fn process_edges<'tcx>(
            this: &RegionConstraintData<'tcx>,
            state: &mut WalkState<'tcx>,
            graph: &RegionGraph<'tcx>,
            source_vid: RegionVid,
            dir: Direction,
        ) {
            debug!("process_edges(source_vid={:?}, dir={:?})", source_vid, dir);

            let source_node_index = NodeIndex(source_vid.index() as usize);
            for (_, edge) in graph.adjacent_edges(source_node_index, dir) {
                match edge.data {
                    Constraint::VarSubVar(from_vid, to_vid) => {
                        let opp_vid = if from_vid == source_vid {
                            to_vid
                        } else {
                            from_vid
                        };
                        if state.set.insert(opp_vid) {
                            state.stack.push(opp_vid);
                        }
                    }

                    Constraint::RegSubVar(region, _) | Constraint::VarSubReg(_, region) => {
                        state.result.push(RegionAndOrigin {
                            region,
                            origin: this.constraints.get(&edge.data).unwrap().clone(),
                        });
                    }

                    Constraint::RegSubReg(..) => panic!(
                        "cannot reach reg-sub-reg edge in region inference \
                         post-processing"
                    ),
                }
            }
        }
    }
698 | ||
699 | fn iterate_until_fixed_point<F>(&self, tag: &str, mut body: F) | |
700 | where | |
701 | F: FnMut(&Constraint<'tcx>, &SubregionOrigin<'tcx>) -> bool, | |
702 | { | |
703 | let mut iteration = 0; | |
704 | let mut changed = true; | |
705 | while changed { | |
706 | changed = false; | |
707 | iteration += 1; | |
708 | debug!("---- {} Iteration {}{}", "#", tag, iteration); | |
709 | for (constraint, origin) in &self.data.constraints { | |
710 | let edge_changed = body(constraint, origin); | |
711 | if edge_changed { | |
712 | debug!("Updated due to constraint {:?}", constraint); | |
713 | changed = true; | |
714 | } | |
715 | } | |
716 | } | |
717 | debug!("---- {} Complete after {} iteration(s)", tag, iteration); | |
718 | } | |
719 | ||
    /// Tests whether the region `min` satisfies `bound` under the
    /// current variable values (variables in the bound are normalized
    /// to their resolved regions first).
    fn bound_is_met(
        &self,
        bound: &VerifyBound<'tcx>,
        var_values: &LexicalRegionResolutions<'tcx>,
        min: ty::Region<'tcx>,
    ) -> bool {
        match bound {
            VerifyBound::AnyRegion(rs) => rs.iter()
                .map(|&r| var_values.normalize(r))
                .any(|r| self.region_rels.is_subregion_of(min, r)),

            VerifyBound::AllRegions(rs) => rs.iter()
                .map(|&r| var_values.normalize(r))
                .all(|r| self.region_rels.is_subregion_of(min, r)),

            // Recursive cases: disjunction / conjunction of sub-bounds.
            VerifyBound::AnyBound(bs) => bs.iter().any(|b| self.bound_is_met(b, var_values, min)),

            VerifyBound::AllBounds(bs) => bs.iter().all(|b| self.bound_is_met(b, var_values, min)),
        }
    }
740 | } | |
741 | ||
/// Compact debug rendering used in error-reporting `debug!` output.
impl<'tcx> fmt::Debug for RegionAndOrigin<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "RegionAndOrigin({:?},{:?})", self.region, self.origin)
    }
}
747 | ||
748 | ||
749 | impl<'tcx> LexicalRegionResolutions<'tcx> { | |
    /// Replaces a region variable with its resolved value; all other
    /// regions pass through unchanged.
    fn normalize(&self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
        match *r {
            ty::ReVar(rid) => self.resolve_var(rid),
            _ => r,
        }
    }
756 | ||
    /// Raw (possibly `ErrorValue`) value of variable `rid`.
    fn value(&self, rid: RegionVid) -> &VarValue<'tcx> {
        &self.values[rid]
    }
760 | ||
    /// Mutable access to the raw value of variable `rid`.
    fn value_mut(&mut self, rid: RegionVid) -> &mut VarValue<'tcx> {
        &mut self.values[rid]
    }
764 | ||
    /// Final region for variable `rid`; error variables resolve to
    /// `error_region` (`'static`).
    pub fn resolve_var(&self, rid: RegionVid) -> ty::Region<'tcx> {
        let result = match self.values[rid] {
            VarValue::Value(r) => r,
            VarValue::ErrorValue => self.error_region,
        };
        debug!("resolve_var({:?}) = {:?}", rid, result);
        result
    }
773 | } |