// vendor/chalk-solve/src/recursive.rs
mod combine;
mod fulfill;
pub mod lib;
mod search_graph;
mod solve;
mod stack;

use self::lib::{Minimums, Solution, UCanonicalGoal};
use self::search_graph::{DepthFirstNumber, SearchGraph};
use self::solve::{SolveDatabase, SolveIteration};
use self::stack::{Stack, StackDepth};
use crate::{coinductive_goal::IsCoinductive, RustIrDatabase};
use chalk_ir::interner::Interner;
use chalk_ir::{Canonical, ConstrainedSubst, Fallible};
use rustc_hash::FxHashMap;
use tracing::{debug, info, instrument};

pub(crate) struct RecursiveContext<I: Interner> {
    stack: Stack,

    /// The "search graph" stores "in-progress results" that are still being
    /// solved.
    search_graph: SearchGraph<I>,

    /// The "cache" stores results for goals that we have completely solved.
    /// Things are added to the cache when we have completely processed their
    /// result.
    cache: FxHashMap<UCanonicalGoal<I>, Fallible<Solution<I>>>,

    caching_enabled: bool,
}

/// A Solver is the basic context in which you can propose goals for a given
/// program. **All questions posed to the solver are in canonical, closed form,
/// so that each question is answered with effectively a "clean slate"**. This
/// allows for better caching, and simplifies management of the inference
/// context.
pub(crate) struct Solver<'me, I: Interner> {
    program: &'me dyn RustIrDatabase<I>,
    context: &'me mut RecursiveContext<I>,
}

/// An extension trait for merging `Result`s.
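///
/// For illustration only (this example is not part of the original source):
/// `merge_with` keeps whichever side succeeded and combines two successes with
/// the supplied closure, so for `Fallible<u32>` values one would expect
/// roughly the following:
///
/// ```ignore
/// let merged = Ok(1).merge_with(Err(NoSolution), |x, y| x + y); // Ok(1)
/// let both: Fallible<u32> = Ok(1).merge_with(Ok(2), |x, y| x + y); // Ok(3)
/// ```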
trait MergeWith<T> {
    fn merge_with<F>(self, other: Self, f: F) -> Self
    where
        F: FnOnce(T, T) -> T;
}

impl<T> MergeWith<T> for Fallible<T> {
    fn merge_with<F>(self: Fallible<T>, other: Fallible<T>, f: F) -> Fallible<T>
    where
        F: FnOnce(T, T) -> T,
    {
        match (self, other) {
            (Err(_), Ok(v)) | (Ok(v), Err(_)) => Ok(v),
            (Ok(v1), Ok(v2)) => Ok(f(v1, v2)),
            (Err(_), Err(e)) => Err(e),
        }
    }
}

impl<I: Interner> RecursiveContext<I> {
    pub(crate) fn new(overflow_depth: usize, caching_enabled: bool) -> Self {
        RecursiveContext {
            stack: Stack::new(overflow_depth),
            search_graph: SearchGraph::new(),
            cache: FxHashMap::default(),
            caching_enabled,
        }
    }

    pub(crate) fn solver<'me>(
        &'me mut self,
        program: &'me dyn RustIrDatabase<I>,
    ) -> Solver<'me, I> {
        Solver {
            program,
            context: self,
        }
    }
}
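
// A rough usage sketch (not taken from the original source): given some
// `db: &dyn RustIrDatabase<I>` and an already-canonicalized `goal`, a caller
// would be expected to drive the solver along these lines; `overflow_depth`
// is whatever stack limit the embedder chooses.
//
//     let mut ctx = RecursiveContext::new(overflow_depth, /* caching_enabled */ true);
//     let mut solver = ctx.solver(db);
//     let result = solver.solve_root_goal(&goal);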

impl<'me, I: Interner> Solver<'me, I> {
    /// Solves a canonical goal. The substitution returned in the
    /// solution will be for the fully decomposed goal. For example, given the
    /// program
    ///
    /// ```ignore
    /// struct u8 { }
    /// struct SomeType<T> { }
    /// trait Foo<T> { }
    /// impl<U> Foo<u8> for SomeType<U> { }
    /// ```
    ///
    /// and the goal `exists<V> { forall<U> { SomeType<U>: Foo<V> } }`,
    /// `into_peeled_goal` can be used to create a canonical goal
    /// `SomeType<!1>: Foo<?0>`. This function will then return a
    /// solution with the substitution `?0 := u8`.
    pub(crate) fn solve_root_goal(
        &mut self,
        canonical_goal: &UCanonicalGoal<I>,
    ) -> Fallible<Solution<I>> {
        debug!("solve_root_goal(canonical_goal={:?})", canonical_goal);
        assert!(self.context.stack.is_empty());
        let minimums = &mut Minimums::new();
        self.solve_goal(canonical_goal.clone(), minimums)
    }

    #[instrument(level = "debug", skip(self))]
    fn solve_new_subgoal(
        &mut self,
        canonical_goal: UCanonicalGoal<I>,
        depth: StackDepth,
        dfn: DepthFirstNumber,
    ) -> Minimums {
        // We start with `answer = None` and try to solve the goal. At the end of
        // the iteration, `answer` will be updated with the result of the solving
        // process. If we detect a cycle during the solving process, we record the
        // current `answer` in the search graph and try to solve the goal again.
        // We repeat until we reach a fixed point for `answer`.
        //
        // Considering the partial order:
        // - None < Some(Unique) < Some(Ambiguous)
        // - None < Some(CannotProve)
        // the function mapping each loop iteration to `answer` is nondecreasing.
        // Since this order has finite height, `answer` eventually becomes
        // constant and the loop terminates.
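        //
        // Illustratively (not from the original source), a cyclic goal might see
        // `answer` move from `None` (the initial error result), to `Some(Unique)`
        // on one pass, to `Some(Ambiguous)` on the next; once two consecutive
        // passes agree, the loop below returns.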
        loop {
            let minimums = &mut Minimums::new();
            let (current_answer, current_prio) = self.solve_iteration(&canonical_goal, minimums);

            debug!(
                "solve_new_subgoal: loop iteration result = {:?} with minimums {:?}",
                current_answer, minimums
            );

            if !self.context.stack[depth].read_and_reset_cycle_flag() {
                // None of our subgoals depended on us directly.
                // We can return.
                self.context.search_graph[dfn].solution = current_answer;
                self.context.search_graph[dfn].solution_priority = current_prio;
                return *minimums;
            }

            let old_answer = &self.context.search_graph[dfn].solution;
            let old_prio = self.context.search_graph[dfn].solution_priority;

            let (current_answer, current_prio) = combine::with_priorities_for_goal(
                self.program.interner(),
                &canonical_goal.canonical.value.goal,
                old_answer.clone(),
                old_prio,
                current_answer,
                current_prio,
            );

            // Some of our subgoals depended on us. We need to re-run
            // with the current answer.
            if self.context.search_graph[dfn].solution == current_answer {
                // Reached a fixed point.
                return *minimums;
            }

            let current_answer_is_ambig = match &current_answer {
                Ok(s) => s.is_ambig(),
                Err(_) => false,
            };

            self.context.search_graph[dfn].solution = current_answer;
            self.context.search_graph[dfn].solution_priority = current_prio;

            // Subtle: if our current answer is ambiguous, we can just stop, and
            // in fact we *must* -- otherwise, we sometimes fail to reach a
            // fixed point. See `multiple_ambiguous_cycles` for more.
            if current_answer_is_ambig {
                return *minimums;
            }

            // Otherwise: rollback the search tree and try again.
            self.context.search_graph.rollback_to(dfn + 1);
        }
    }
}

impl<'me, I: Interner> SolveDatabase<I> for Solver<'me, I> {
    /// Attempt to solve a goal that has been fully broken down into leaf form
    /// and canonicalized. This is where the action really happens, and is the
    /// place where we would perform caching in rustc (and may eventually do in Chalk).
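    ///
    /// For illustration only (not from the original source), a "leaf form"
    /// goal here might be something like `Implemented(Vec<?0>: Clone)`, already
    /// wrapped in its canonical binders, and a caller inside the solver would
    /// invoke this method roughly as:
    ///
    /// ```ignore
    /// let solution = self.solve_goal(canonical_leaf_goal, &mut Minimums::new());
    /// ```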
    #[instrument(level = "info", skip(self, minimums))]
    fn solve_goal(
        &mut self,
        goal: UCanonicalGoal<I>,
        minimums: &mut Minimums,
    ) -> Fallible<Solution<I>> {
        // First check the cache.
        if let Some(value) = self.context.cache.get(&goal) {
            debug!("solve_reduced_goal: cache hit, value={:?}", value);
            return value.clone();
        }

        // Next, check if the goal is in the search tree already.
        if let Some(dfn) = self.context.search_graph.lookup(&goal) {
            // Check if this table is still on the stack.
            if let Some(depth) = self.context.search_graph[dfn].stack_depth {
                // Is this a coinductive goal? If so, that is success,
                // so we can return normally. Note that this return is
                // not tabled.
                //
                // XXX how does caching with coinduction work?
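                //
                // (Illustrative example, not from the original source: auto-trait
                // goals are coinductive, so proving `Foo: Send` for a type like
                // `struct Foo(Box<Foo>)` re-encounters `Foo: Send` in a cycle,
                // and that cycle is treated as success rather than failure.)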
                if self.context.stack.coinductive_cycle_from(depth) {
                    let value = ConstrainedSubst {
                        subst: goal.trivial_substitution(self.program.interner()),
                        constraints: vec![],
                    };
                    debug!("applying coinductive semantics");
                    return Ok(Solution::Unique(Canonical {
                        value,
                        binders: goal.canonical.binders,
                    }));
                }

                self.context.stack[depth].flag_cycle();
            }

            minimums.update_from(self.context.search_graph[dfn].links);

            // Return the solution from the table.
            let previous_solution = self.context.search_graph[dfn].solution.clone();
            let previous_solution_priority = self.context.search_graph[dfn].solution_priority;
            info!(
                "solve_goal: cycle detected, previous solution {:?} with prio {:?}",
                previous_solution, previous_solution_priority
            );
            previous_solution
        } else {
            // Otherwise, push the goal onto the stack and create a table.
            // The initial result for this table is an error.
            let coinductive_goal = goal.is_coinductive(self.program);
            let depth = self.context.stack.push(coinductive_goal);
            let dfn = self.context.search_graph.insert(&goal, depth);
            let subgoal_minimums = self.solve_new_subgoal(goal, depth, dfn);
            self.context.search_graph[dfn].links = subgoal_minimums;
            self.context.search_graph[dfn].stack_depth = None;
            self.context.stack.pop(depth);
            minimums.update_from(subgoal_minimums);

            // Read the final result from the table.
            let result = self.context.search_graph[dfn].solution.clone();
            let priority = self.context.search_graph[dfn].solution_priority;

            // If processing this subgoal did not involve anything
            // outside of its subtree, then we can promote it to the
            // cache now. This is a sort of hack to alleviate the
            // worst of the repeated work that we do during tabling.
            if subgoal_minimums.positive >= dfn {
                if self.context.caching_enabled {
                    self.context
                        .search_graph
                        .move_to_cache(dfn, &mut self.context.cache);
                    debug!("solve_reduced_goal: SCC head encountered, moving to cache");
                } else {
                    debug!(
                        "solve_reduced_goal: SCC head encountered, rolling back as caching disabled"
                    );
                    self.context.search_graph.rollback_to(dfn);
                }
            }

            info!("solve_goal: solution = {:?} prio {:?}", result, priority);
            result
        }
    }

    fn interner(&self) -> &I {
        &self.program.interner()
    }

    fn db(&self) -> &dyn RustIrDatabase<I> {
        self.program
    }
}