1 use crate::fx
::FxHashMap
;
2 use crate::stable_hasher
::{HashStable, StableHasher}
;
4 use rustc_index
::bit_set
::BitMatrix
;
5 use rustc_serialize
::{Decodable, Decoder, Encodable, Encoder}
;
13 #[derive(Clone, Debug)]
14 pub struct TransitiveRelation
<T
: Eq
+ Hash
> {
15 // List of elements. This is used to map from a T to a usize.
18 // Maps each element to an index.
19 map
: FxHashMap
<T
, Index
>,
21 // List of base edges in the graph. Require to compute transitive
25 // This is a cached transitive closure derived from the edges.
26 // Currently, we build it lazilly and just throw out any existing
27 // copy whenever a new edge is added. (The Lock is to permit
28 // the lazy computation.) This is kind of silly, except for the
29 // fact its size is tied to `self.elements.len()`, so I wanted to
30 // wait before building it up to avoid reallocating as new edges
31 // are added with new elements. Perhaps better would be to ask the
32 // user for a batch of edges to minimize this effect, but I
33 // already wrote the code this way. :P -nmatsakis
34 closure
: Lock
<Option
<BitMatrix
<usize, usize>>>,
37 // HACK(eddyb) manual impl avoids `Default` bound on `T`.
38 impl<T
: Eq
+ Hash
> Default
for TransitiveRelation
<T
> {
39 fn default() -> Self {
41 elements
: Default
::default(),
42 map
: Default
::default(),
43 edges
: Default
::default(),
44 closure
: Default
::default(),
49 #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, RustcEncodable, RustcDecodable, Debug)]
52 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)]
impl<T: Clone + Debug + Eq + Hash> TransitiveRelation<T> {
    /// Returns `true` if no edges have ever been added to the relation.
    pub fn is_empty(&self) -> bool {
        self.edges.is_empty()
    }

    /// Iterates over every element added so far, in insertion order.
    pub fn elements(&self) -> impl Iterator<Item = &T> {
        self.elements.iter()
    }
67 fn index(&self, a
: &T
) -> Option
<Index
> {
68 self.map
.get(a
).cloned()
71 fn add_index(&mut self, a
: T
) -> Index
{
72 let &mut TransitiveRelation { ref mut elements, ref mut closure, ref mut map, .. }
= self;
74 *map
.entry(a
.clone()).or_insert_with(|| {
77 // if we changed the dimensions, clear the cache
78 *closure
.get_mut() = None
;
80 Index(elements
.len() - 1)
84 /// Applies the (partial) function to each edge and returns a new
85 /// relation. If `f` returns `None` for any end-point, returns
87 pub fn maybe_map
<F
, U
>(&self, mut f
: F
) -> Option
<TransitiveRelation
<U
>>
89 F
: FnMut(&T
) -> Option
<U
>,
90 U
: Clone
+ Debug
+ Eq
+ Hash
+ Clone
,
92 let mut result
= TransitiveRelation
::default();
93 for edge
in &self.edges
{
94 result
.add(f(&self.elements
[edge
.source
.0])?
, f(&self.elements
[edge
.target
.0])?
);
99 /// Indicate that `a < b` (where `<` is this relation)
100 pub fn add(&mut self, a
: T
, b
: T
) {
101 let a
= self.add_index(a
);
102 let b
= self.add_index(b
);
103 let edge
= Edge { source: a, target: b }
;
104 if !self.edges
.contains(&edge
) {
105 self.edges
.push(edge
);
107 // added an edge, clear the cache
108 *self.closure
.get_mut() = None
;
112 /// Checks whether `a < target` (transitively)
113 pub fn contains(&self, a
: &T
, b
: &T
) -> bool
{
114 match (self.index(a
), self.index(b
)) {
115 (Some(a
), Some(b
)) => self.with_closure(|closure
| closure
.contains(a
.0, b
.0)),
116 (None
, _
) | (_
, None
) => false,
120 /// Thinking of `x R y` as an edge `x -> y` in a graph, this
121 /// returns all things reachable from `a`.
123 /// Really this probably ought to be `impl Iterator<Item = &T>`, but
124 /// I'm too lazy to make that work, and -- given the caching
125 /// strategy -- it'd be a touch tricky anyhow.
126 pub fn reachable_from(&self, a
: &T
) -> Vec
<&T
> {
127 match self.index(a
) {
129 self.with_closure(|closure
| closure
.iter(a
.0).map(|i
| &self.elements
[i
]).collect())
135 /// Picks what I am referring to as the "postdominating"
136 /// upper-bound for `a` and `b`. This is usually the least upper
137 /// bound, but in cases where there is no single least upper
138 /// bound, it is the "mutual immediate postdominator", if you
139 /// imagine a graph where `a < b` means `a -> b`.
141 /// This function is needed because region inference currently
142 /// requires that we produce a single "UB", and there is no best
143 /// choice for the LUB. Rather than pick arbitrarily, I pick a
144 /// less good, but predictable choice. This should help ensure
145 /// that region inference yields predictable results (though it
146 /// itself is not fully sufficient).
148 /// Examples are probably clearer than any prose I could write
149 /// (there are corresponding tests below, btw). In each case,
150 /// the query is `postdom_upper_bound(a, b)`:
153 /// // Returns Some(x), which is also LUB.
159 /// // Returns `Some(x)`, which is not LUB (there is none)
160 /// // diagonal edges run left-to-right.
166 /// // Returns `None`.
170 pub fn postdom_upper_bound(&self, a
: &T
, b
: &T
) -> Option
<&T
> {
171 let mubs
= self.minimal_upper_bounds(a
, b
);
172 self.mutual_immediate_postdominator(mubs
)
175 /// Viewing the relation as a graph, computes the "mutual
176 /// immediate postdominator" of a set of points (if one
177 /// exists). See `postdom_upper_bound` for details.
178 pub fn mutual_immediate_postdominator
<'a
>(&'a
self, mut mubs
: Vec
<&'a T
>) -> Option
<&'a T
> {
182 1 => return Some(mubs
[0]),
184 let m
= mubs
.pop().unwrap();
185 let n
= mubs
.pop().unwrap();
186 mubs
.extend(self.minimal_upper_bounds(n
, m
));
192 /// Returns the set of bounds `X` such that:
194 /// - `a < X` and `b < X`
195 /// - there is no `Y != X` such that `a < Y` and `Y < X`
196 /// - except for the case where `X < a` (i.e., a strongly connected
197 /// component in the graph). In that case, the smallest
198 /// representative of the SCC is returned (as determined by the
199 /// internal indices).
201 /// Note that this set can, in principle, have any size.
202 pub fn minimal_upper_bounds(&self, a
: &T
, b
: &T
) -> Vec
<&T
> {
203 let (mut a
, mut b
) = match (self.index(a
), self.index(b
)) {
204 (Some(a
), Some(b
)) => (a
, b
),
205 (None
, _
) | (_
, None
) => {
210 // in some cases, there are some arbitrary choices to be made;
211 // it doesn't really matter what we pick, as long as we pick
212 // the same thing consistently when queried, so ensure that
213 // (a, b) are in a consistent relative order
215 mem
::swap(&mut a
, &mut b
);
218 let lub_indices
= self.with_closure(|closure
| {
219 // Easy case is when either a < b or b < a:
220 if closure
.contains(a
.0, b
.0) {
223 if closure
.contains(b
.0, a
.0) {
227 // Otherwise, the tricky part is that there may be some c
228 // where a < c and b < c. In fact, there may be many such
229 // values. So here is what we do:
231 // 1. Find the vector `[X | a < X && b < X]` of all values
232 // `X` where `a < X` and `b < X`. In terms of the
233 // graph, this means all values reachable from both `a`
234 // and `b`. Note that this vector is also a set, but we
235 // use the term vector because the order matters
236 // to the steps below.
237 // - This vector contains upper bounds, but they are
238 // not minimal upper bounds. So you may have e.g.
239 // `[x, y, tcx, z]` where `x < tcx` and `y < tcx` and
240 // `z < x` and `z < y`:
242 // z --+---> x ----+----> tcx
247 // In this case, we really want to return just `[z]`.
248 // The following steps below achieve this by gradually
249 // reducing the list.
250 // 2. Pare down the vector using `pare_down`. This will
251 // remove elements from the vector that can be reached
252 // by an earlier element.
253 // - In the example above, this would convert `[x, y,
254 // tcx, z]` to `[x, y, z]`. Note that `x` and `y` are
255 // still in the vector; this is because while `z < x`
256 // (and `z < y`) holds, `z` comes after them in the
258 // 3. Reverse the vector and repeat the pare down process.
259 // - In the example above, we would reverse to
260 // `[z, y, x]` and then pare down to `[z]`.
261 // 4. Reverse once more just so that we yield a vector in
262 // increasing order of index. Not necessary, but why not.
264 // I believe this algorithm yields a minimal set. The
265 // argument is that, after step 2, we know that no element
266 // can reach its successors (in the vector, not the graph).
267 // After step 3, we know that no element can reach any of
268 // its predecesssors (because of step 2) nor successors
269 // (because we just called `pare_down`)
271 // This same algorithm is used in `parents` below.
273 let mut candidates
= closure
.intersect_rows(a
.0, b
.0); // (1)
274 pare_down(&mut candidates
, closure
); // (2)
275 candidates
.reverse(); // (3a)
276 pare_down(&mut candidates
, closure
); // (3b)
283 .map(|i
| &self.elements
[i
])
287 /// Given an element A, returns the maximal set {B} of elements B
292 /// - for each i, j: B[i] R B[j] does not hold
294 /// The intuition is that this moves "one step up" through a lattice
295 /// (where the relation is encoding the `<=` relation for the lattice).
296 /// So e.g., if the relation is `->` and we have
304 /// then `parents(a)` returns `[b, c]`. The `postdom_parent` function
305 /// would further reduce this to just `f`.
306 pub fn parents(&self, a
: &T
) -> Vec
<&T
> {
307 let a
= match self.index(a
) {
309 None
=> return vec
![],
312 // Steal the algorithm for `minimal_upper_bounds` above, but
313 // with a slight tweak. In the case where `a R a`, we remove
314 // that from the set of candidates.
315 let ancestors
= self.with_closure(|closure
| {
316 let mut ancestors
= closure
.intersect_rows(a
.0, a
.0);
318 // Remove anything that can reach `a`. If this is a
319 // reflexive relation, this will include `a` itself.
320 ancestors
.retain(|&e
| !closure
.contains(e
, a
.0));
322 pare_down(&mut ancestors
, closure
); // (2)
323 ancestors
.reverse(); // (3a)
324 pare_down(&mut ancestors
, closure
); // (3b)
331 .map(|i
| &self.elements
[i
])
335 /// A "best" parent in some sense. See `parents` and
336 /// `postdom_upper_bound` for more details.
337 pub fn postdom_parent(&self, a
: &T
) -> Option
<&T
> {
338 self.mutual_immediate_postdominator(self.parents(a
))
341 fn with_closure
<OP
, R
>(&self, op
: OP
) -> R
343 OP
: FnOnce(&BitMatrix
<usize, usize>) -> R
,
345 let mut closure_cell
= self.closure
.borrow_mut();
346 let mut closure
= closure_cell
.take();
347 if closure
.is_none() {
348 closure
= Some(self.compute_closure());
350 let result
= op(closure
.as_ref().unwrap());
351 *closure_cell
= closure
;
355 fn compute_closure(&self) -> BitMatrix
<usize, usize> {
356 let mut matrix
= BitMatrix
::new(self.elements
.len(), self.elements
.len());
357 let mut changed
= true;
360 for edge
in &self.edges
{
361 // add an edge from S -> T
362 changed
|= matrix
.insert(edge
.source
.0, edge
.target
.0);
364 // add all outgoing edges from T into S
365 changed
|= matrix
.union_rows(edge
.target
.0, edge
.source
.0);
371 /// Lists all the base edges in the graph: the initial _non-transitive_ set of element
372 /// relations, which will be later used as the basis for the transitive closure computation.
373 pub fn base_edges(&self) -> impl Iterator
<Item
= (&T
, &T
)> {
376 .map(move |edge
| (&self.elements
[edge
.source
.0], &self.elements
[edge
.target
.0]))
380 /// Pare down is used as a step in the LUB computation. It edits the
381 /// candidates array in place by removing any element j for which
382 /// there exists an earlier element i<j such that i -> j. That is,
383 /// after you run `pare_down`, you know that for all elements that
384 /// remain in candidates, they cannot reach any of the elements that
387 /// Examples follow. Assume that a -> b -> c and x -> y -> z.
389 /// - Input: `[a, b, x]`. Output: `[a, x]`.
390 /// - Input: `[b, a, x]`. Output: `[b, a, x]`.
391 /// - Input: `[a, x, b, y]`. Output: `[a, x]`.
392 fn pare_down(candidates
: &mut Vec
<usize>, closure
: &BitMatrix
<usize, usize>) {
394 while i
< candidates
.len() {
395 let candidate_i
= candidates
[i
];
400 while j
< candidates
.len() {
401 let candidate_j
= candidates
[j
];
402 if closure
.contains(candidate_i
, candidate_j
) {
403 // If `i` can reach `j`, then we can remove `j`. So just
404 // mark it as dead and move on; subsequent indices will be
405 // shifted into its place.
408 candidates
[j
- dead
] = candidate_j
;
412 candidates
.truncate(j
- dead
);
416 impl<T
> Encodable
for TransitiveRelation
<T
>
418 T
: Clone
+ Encodable
+ Debug
+ Eq
+ Hash
+ Clone
,
420 fn encode
<E
: Encoder
>(&self, s
: &mut E
) -> Result
<(), E
::Error
> {
421 s
.emit_struct("TransitiveRelation", 2, |s
| {
422 s
.emit_struct_field("elements", 0, |s
| self.elements
.encode(s
))?
;
423 s
.emit_struct_field("edges", 1, |s
| self.edges
.encode(s
))?
;
429 impl<T
> Decodable
for TransitiveRelation
<T
>
431 T
: Clone
+ Decodable
+ Debug
+ Eq
+ Hash
+ Clone
,
433 fn decode
<D
: Decoder
>(d
: &mut D
) -> Result
<Self, D
::Error
> {
434 d
.read_struct("TransitiveRelation", 2, |d
| {
435 let elements
: Vec
<T
> = d
.read_struct_field("elements", 0, |d
| Decodable
::decode(d
))?
;
436 let edges
= d
.read_struct_field("edges", 1, |d
| Decodable
::decode(d
))?
;
440 .map(|(index
, elem
)| (elem
.clone(), Index(index
)))
442 Ok(TransitiveRelation { elements, edges, map, closure: Lock::new(None) }
)
447 impl<CTX
, T
> HashStable
<CTX
> for TransitiveRelation
<T
>
449 T
: HashStable
<CTX
> + Eq
+ Debug
+ Clone
+ Hash
,
451 fn hash_stable(&self, hcx
: &mut CTX
, hasher
: &mut StableHasher
) {
452 // We are assuming here that the relation graph has been built in a
453 // deterministic way and we can just hash it the way it is.
454 let TransitiveRelation
{
457 // "map" is just a copy of elements vec
459 // "closure" is just a copy of the data above
463 elements
.hash_stable(hcx
, hasher
);
464 edges
.hash_stable(hcx
, hasher
);
468 impl<CTX
> HashStable
<CTX
> for Edge
{
469 fn hash_stable(&self, hcx
: &mut CTX
, hasher
: &mut StableHasher
) {
470 let Edge { ref source, ref target }
= *self;
472 source
.hash_stable(hcx
, hasher
);
473 target
.hash_stable(hcx
, hasher
);
477 impl<CTX
> HashStable
<CTX
> for Index
{
478 fn hash_stable(&self, hcx
: &mut CTX
, hasher
: &mut StableHasher
) {
479 let Index(idx
) = *self;
480 idx
.hash_stable(hcx
, hasher
);