// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Code to save/load the dep-graph from files.

use rustc::dep_graph::{DepNode, WorkProductId};
use rustc::hir::def_id::DefId;
use rustc::hir::map::DefPathHash;
use rustc::hir::svh::Svh;
use rustc::ich::Fingerprint;
use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use rustc_serialize::Decodable as RustcDecodable;
use rustc_serialize::opaque::Decoder;
use std::default::Default;
use std::path::Path;
use std::sync::Arc;

use IncrementalHashesMap;
use super::data::*;
use super::dirty_clean;
use super::hash::*;
use super::fs::*;
use super::file_format;
use super::work_product;

// The key is a dirty node. The value is **some** base-input that we
// can blame it on.
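// For example, an entry might record that a made-up node `Foo(X)` is dirty
// and blame it on the base input `Hir(X)` whose hash changed (same naming
// convention as the `Hir(X) -> Foo(Y) -> Bar` example further down).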
pub type DirtyNodes = FxHashMap<DepNode<DefPathHash>, DepNode<DefPathHash>>;

/// If we are in incremental mode, and a previous dep-graph exists,
/// then load up those nodes/edges that are still valid into the
/// dep-graph for this session. (This is assumed to be running very
/// early in compilation, before we've really done any work, but
/// actually it doesn't matter all that much.) See `README.md` for
/// a more general overview.
pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                incremental_hashes_map: &IncrementalHashesMap) {
    if tcx.sess.opts.incremental.is_none() {
        return;
    }

    match prepare_session_directory(tcx) {
        Ok(true) => {
            // We successfully allocated a session directory and there is
            // something in it to load, so continue
        }
        Ok(false) => {
            // We successfully allocated a session directory, but there is no
            // dep-graph data in it to load (because this is the first
            // compilation session with this incr. comp. dir.)
            return
        }
        Err(()) => {
            // Something went wrong while trying to allocate the session
            // directory. Don't try to use it any further.
            return
        }
    }

    let _ignore = tcx.dep_graph.in_ignore();
    load_dep_graph_if_exists(tcx, incremental_hashes_map);
}

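/// Loads the serialized dep-graph and work-product data from the current
/// session directory, if both files are present, and feeds them to
/// `decode_dep_graph`. A missing file simply means there is nothing to reuse.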
fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                      incremental_hashes_map: &IncrementalHashesMap) {
    let dep_graph_path = dep_graph_path(tcx.sess);
    let dep_graph_data = match load_data(tcx.sess, &dep_graph_path) {
        Some(p) => p,
        None => return // no file
    };

    let work_products_path = work_products_path(tcx.sess);
    let work_products_data = match load_data(tcx.sess, &work_products_path) {
        Some(p) => p,
        None => return // no file
    };

    match decode_dep_graph(tcx, incremental_hashes_map, &dep_graph_data, &work_products_data) {
        Ok(dirty_nodes) => dirty_nodes,
        Err(err) => {
            tcx.sess.warn(
                &format!("decoding error in dep-graph from `{}` and `{}`: {}",
                         dep_graph_path.display(),
                         work_products_path.display(),
                         err));
        }
    }
}

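/// Reads the raw bytes of a dep-graph related file. Returns `None` (after
/// clearing the session directory) if the file is missing, was written by an
/// incompatible compiler version, or cannot be read.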
fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
    match file_format::read_file(sess, path) {
        Ok(Some(data)) => return Some(data),
        Ok(None) => {
            // The file either didn't exist or was produced by an incompatible
            // compiler version. Neither is an error.
        }
        Err(err) => {
            sess.err(
                &format!("could not load dep-graph from `{}`: {}",
                         path.display(), err));
        }
    }

    if let Err(err) = delete_all_session_dir_contents(sess) {
        sess.err(&format!("could not clear incompatible incremental \
                           compilation session directory `{}`: {}",
                          path.display(), err));
    }

    None
}

/// Try to convert a DepNode from the old dep-graph into a DepNode in the
/// current graph by mapping the DefPathHash to a valid DefId. This will fail
/// if the DefPathHash refers to something that has been removed (because
/// there is no DefId for that thing anymore).
fn retrace(tcx: TyCtxt, dep_node: &DepNode<DefPathHash>) -> Option<DepNode<DefId>> {
    dep_node.map_def(|def_path_hash| {
        tcx.def_path_hash_to_def_id.as_ref().unwrap().get(def_path_hash).cloned()
    })
}

/// Decode the dep graph and load the edges/nodes that are still clean
/// into `tcx.dep_graph`.
pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                  incremental_hashes_map: &IncrementalHashesMap,
                                  dep_graph_data: &[u8],
                                  work_products_data: &[u8])
                                  -> Result<(), String>
{
    // Decode the list of work_products
    let mut work_product_decoder = Decoder::new(work_products_data, 0);
    let work_products = <Vec<SerializedWorkProduct>>::decode(&mut work_product_decoder)?;

    // Deserialize the directory and dep-graph.
    let mut dep_graph_decoder = Decoder::new(dep_graph_data, 0);
    let prev_commandline_args_hash = u64::decode(&mut dep_graph_decoder)?;

    if prev_commandline_args_hash != tcx.sess.opts.dep_tracking_hash() {
        if tcx.sess.opts.debugging_opts.incremental_info {
            println!("incremental: completely ignoring cache because of \
                      differing commandline arguments");
        }
        // We can't reuse the cache, purge it.
        debug!("decode_dep_graph: differing commandline arg hashes");
        for swp in work_products {
            delete_dirty_work_product(tcx, swp);
        }

        // No need to do any further work
        return Ok(());
    }

    let serialized_dep_graph = SerializedDepGraph::decode(&mut dep_graph_decoder)?;

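    // The serialized graph stores edges in compressed form: for node `i`,
    // `edge_list_indices[i]` gives a `(start, end)` range into the flat
    // `edge_list_data` array, whose entries are indices back into `nodes`.
    // Expand that into an explicit source -> targets adjacency map so the
    // edges are easier to walk below.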
    let edge_map: FxHashMap<DepNode<DefPathHash>, Vec<DepNode<DefPathHash>>> = {
        let capacity = serialized_dep_graph.edge_list_data.len();
        let mut edge_map = FxHashMap::with_capacity_and_hasher(capacity, Default::default());

        for (node_index, source) in serialized_dep_graph.nodes.iter().enumerate() {
            let (start, end) = serialized_dep_graph.edge_list_indices[node_index];
            let targets =
                (&serialized_dep_graph.edge_list_data[start as usize .. end as usize])
                .into_iter()
                .map(|&node_index| serialized_dep_graph.nodes[node_index].clone())
                .collect();

            edge_map.insert(source.clone(), targets);
        }

        edge_map
    };

    // Compute the set of nodes from the old graph where some input
    // has changed or been removed. These are "raw" source nodes,
    // which means that they still use the original `DefPathHash`
    // values from the encoding, rather than having been retraced to a
    // `DefId`. The reason for this is that this way we can include
    // nodes that have been removed (which no longer have a `DefId` in
    // the current compilation).
    let dirty_raw_nodes = initial_dirty_nodes(tcx,
                                              incremental_hashes_map,
                                              &serialized_dep_graph.hashes);
    let dirty_raw_nodes = transitive_dirty_nodes(&edge_map, dirty_raw_nodes);

    // Recreate the edges in the graph that are still clean.
    let mut clean_work_products = FxHashSet();
    let mut dirty_work_products = FxHashSet(); // incomplete; just used to suppress debug output
    let mut extra_edges = vec![];
    for (source, targets) in &edge_map {
        for target in targets {
            process_edges(tcx, source, target, &edge_map, &dirty_raw_nodes,
                          &mut clean_work_products, &mut dirty_work_products, &mut extra_edges);
        }
    }

    // Recreate bootstrap outputs, which are outputs that have no incoming edges (and hence cannot
    // be dirty).
    for bootstrap_output in &serialized_dep_graph.bootstrap_outputs {
        if let Some(n) = retrace(tcx, bootstrap_output) {
            if let DepNode::WorkProduct(ref wp) = n {
                clean_work_products.insert(wp.clone());
            }

            tcx.dep_graph.with_task(n, (), (), create_node);

            fn create_node((): (), (): ()) {
                // just create the node with no inputs
            }
        }
    }

    // Subtle. Sometimes we have intermediate nodes that we can't recreate in the new graph.
    // This is pretty unusual but it arises in a scenario like this:
    //
    //     Hir(X) -> Foo(Y) -> Bar
    //
    // Note that the `Hir(Y)` is not an input to `Foo(Y)` -- this
    // almost never happens, but can happen in some obscure
    // scenarios. In that case, if `Y` is removed, then we can't
    // recreate `Foo(Y)` (the def-id `Y` no longer exists); what we do
    // then is to push the edge `Hir(X) -> Bar` onto `extra_edges`
    // (along with any other targets of `Foo(Y)`). We will then add
    // the edge from `Hir(X)` to `Bar` (or, if `Bar` itself cannot be
    // recreated, to the targets of `Bar`).
    while let Some((source, target)) = extra_edges.pop() {
        process_edges(tcx, source, target, &edge_map, &dirty_raw_nodes,
                      &mut clean_work_products, &mut dirty_work_products, &mut extra_edges);
    }

    // Add in work-products that are still clean, and delete those that are
    // dirty.
    reconcile_work_products(tcx, work_products, &clean_work_products);

    dirty_clean::check_dirty_clean_annotations(tcx, &dirty_raw_nodes);

    load_prev_metadata_hashes(tcx,
                              &mut *incremental_hashes_map.prev_metadata_hashes.borrow_mut());
    Ok(())
}

/// Computes which of the original set of nodes are dirty: each saved hash is
/// retraced and re-hashed, and nodes whose hash has changed or that have been
/// removed are recorded. Returns a `DirtyNodes` map from each dirty node to
/// the base input it is blamed on (initially itself).
fn initial_dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                 incremental_hashes_map: &IncrementalHashesMap,
                                 serialized_hashes: &[SerializedHash])
                                 -> DirtyNodes {
    let mut hcx = HashContext::new(tcx, incremental_hashes_map);
    let mut dirty_nodes = FxHashMap();

    let print_removed_message = |dep_node: &DepNode<_>| {
        if tcx.sess.opts.debugging_opts.incremental_dump_hash {
            println!("node {:?} is dirty as it was removed", dep_node);
        }

        debug!("initial_dirty_nodes: {:?} is dirty as it was removed", dep_node);
    };

    for hash in serialized_hashes {
        if let Some(dep_node) = retrace(tcx, &hash.dep_node) {
            if let Some(current_hash) = hcx.hash(&dep_node) {
                if current_hash == hash.hash {
                    debug!("initial_dirty_nodes: {:?} is clean (hash={:?})",
                           dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
                           current_hash);
                    continue;
                }

                if tcx.sess.opts.debugging_opts.incremental_dump_hash {
                    println!("node {:?} is dirty as hash is {:?}, was {:?}",
                             dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
                             current_hash,
                             hash.hash);
                }

                debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}",
                       dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
                       current_hash,
                       hash.hash);
            } else {
                print_removed_message(&hash.dep_node);
            }
        } else {
            print_removed_message(&hash.dep_node);
        }

        dirty_nodes.insert(hash.dep_node.clone(), hash.dep_node.clone());
    }

    dirty_nodes
}

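/// Propagates dirtiness forward through the old edge map: any target reachable
/// from a dirty node becomes dirty as well, inheriting the same blame node.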
fn transitive_dirty_nodes(edge_map: &FxHashMap<DepNode<DefPathHash>, Vec<DepNode<DefPathHash>>>,
                          mut dirty_nodes: DirtyNodes)
                          -> DirtyNodes
{
    let mut stack: Vec<(DepNode<DefPathHash>, DepNode<DefPathHash>)> = vec![];
    stack.extend(dirty_nodes.iter().map(|(s, b)| (s.clone(), b.clone())));
    while let Some((source, blame)) = stack.pop() {
        // we know the source is dirty (because of the node `blame`)...
        assert!(dirty_nodes.contains_key(&source));

        // ...so we dirty all the targets (with the same blame)
        if let Some(targets) = edge_map.get(&source) {
            for target in targets {
                if !dirty_nodes.contains_key(target) {
                    dirty_nodes.insert(target.clone(), blame.clone());
                    stack.push((target.clone(), blame.clone()));
                }
            }
        }
    }
    dirty_nodes
}

/// Go through the list of work-products produced in the previous run.
/// Delete any whose nodes have been found to be dirty or which are
/// otherwise no longer applicable.
fn reconcile_work_products<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                     work_products: Vec<SerializedWorkProduct>,
                                     clean_work_products: &FxHashSet<Arc<WorkProductId>>) {
    debug!("reconcile_work_products({:?})", work_products);
    for swp in work_products {
        if !clean_work_products.contains(&swp.id) {
            debug!("reconcile_work_products: dep-node for {:?} is dirty", swp);
            delete_dirty_work_product(tcx, swp);
        } else {
            let mut all_files_exist = true;
            for &(_, ref file_name) in swp.work_product.saved_files.iter() {
                let path = in_incr_comp_dir_sess(tcx.sess, file_name);
                if !path.exists() {
                    all_files_exist = false;

                    if tcx.sess.opts.debugging_opts.incremental_info {
                        println!("incremental: could not find file for up-to-date work product: {}",
                                 path.display());
                    }
                }
            }

            if all_files_exist {
                debug!("reconcile_work_products: all files for {:?} exist", swp);
                tcx.dep_graph.insert_previous_work_product(&swp.id, swp.work_product);
            } else {
                debug!("reconcile_work_products: some file for {:?} does not exist", swp);
                delete_dirty_work_product(tcx, swp);
            }
        }
    }
}

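/// Removes the saved on-disk files belonging to a work-product that can no
/// longer be reused.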
fn delete_dirty_work_product(tcx: TyCtxt,
                             swp: SerializedWorkProduct) {
    debug!("delete_dirty_work_product({:?})", swp);
    work_product::delete_workproduct_files(tcx.sess, &swp.work_product);
}

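/// Loads the metadata hashes written by the previous compilation session into
/// `output`, keyed by the `DefId` each entry retraces to. Skipped unless
/// `-Z query-dep-graph` is enabled.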
fn load_prev_metadata_hashes(tcx: TyCtxt,
                             output: &mut FxHashMap<DefId, Fingerprint>) {
    if !tcx.sess.opts.debugging_opts.query_dep_graph {
        return
    }

    debug!("load_prev_metadata_hashes() - Loading previous metadata hashes");

    let file_path = metadata_hash_export_path(tcx.sess);

    if !file_path.exists() {
        debug!("load_prev_metadata_hashes() - Couldn't find file containing \
                hashes at `{}`", file_path.display());
        return
    }

    debug!("load_prev_metadata_hashes() - File: {}", file_path.display());

    let data = match file_format::read_file(tcx.sess, &file_path) {
        Ok(Some(data)) => data,
        Ok(None) => {
            debug!("load_prev_metadata_hashes() - File produced by incompatible \
                    compiler version: {}", file_path.display());
            return
        }
        Err(err) => {
            debug!("load_prev_metadata_hashes() - Error reading file `{}`: {}",
                   file_path.display(), err);
            return
        }
    };

    debug!("load_prev_metadata_hashes() - Decoding hashes");
    let mut decoder = Decoder::new(&data, 0);
    let _ = Svh::decode(&mut decoder).unwrap();
    let serialized_hashes = SerializedMetadataHashes::decode(&mut decoder).unwrap();

    debug!("load_prev_metadata_hashes() - Mapping DefIds");

    assert_eq!(serialized_hashes.index_map.len(), serialized_hashes.entry_hashes.len());
    let def_path_hash_to_def_id = tcx.def_path_hash_to_def_id.as_ref().unwrap();

    for serialized_hash in serialized_hashes.entry_hashes {
        let def_path_hash = serialized_hashes.index_map[&serialized_hash.def_index];
        if let Some(&def_id) = def_path_hash_to_def_id.get(&def_path_hash) {
            let old = output.insert(def_id, serialized_hash.hash);
            assert!(old.is_none(), "already have hash for {:?}", def_id);
        }
    }

    debug!("load_prev_metadata_hashes() - successfully loaded {} hashes",
           serialized_hashes.index_map.len());
}

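/// Replays a single `source -> target` edge from the old graph. Dirty targets
/// are skipped (printing blame info for dirty work-products when
/// `-Z incremental-info` is set); clean edges are retraced to `DefId`s and
/// re-registered with `tcx.dep_graph`. If the target itself cannot be
/// retraced, edges from `source` to the target's own targets are queued on
/// `extra_edges` instead.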
fn process_edges<'a, 'tcx, 'edges>(
    tcx: TyCtxt<'a, 'tcx, 'tcx>,
    source: &'edges DepNode<DefPathHash>,
    target: &'edges DepNode<DefPathHash>,
    edges: &'edges FxHashMap<DepNode<DefPathHash>, Vec<DepNode<DefPathHash>>>,
    dirty_raw_nodes: &DirtyNodes,
    clean_work_products: &mut FxHashSet<Arc<WorkProductId>>,
    dirty_work_products: &mut FxHashSet<Arc<WorkProductId>>,
    extra_edges: &mut Vec<(&'edges DepNode<DefPathHash>, &'edges DepNode<DefPathHash>)>)
{
    // If the target is dirty, skip the edge. If this is an edge
    // that targets a work-product, we can print the blame
    // information now.
    if let Some(blame) = dirty_raw_nodes.get(target) {
        if let DepNode::WorkProduct(ref wp) = *target {
            if tcx.sess.opts.debugging_opts.incremental_info {
                if dirty_work_products.insert(wp.clone()) {
                    // Try to reconstruct the human-readable version of the
                    // DepNode. This cannot be done for things that were
                    // removed.
                    let readable_blame = if let Some(dep_node) = retrace(tcx, blame) {
                        dep_node.map_def(|&def_id| Some(tcx.def_path(def_id).to_string(tcx)))
                                .unwrap()
                    } else {
                        blame.map_def(|def_path_hash| Some(format!("{:?}", def_path_hash)))
                             .unwrap()
                    };

                    println!("incremental: module {:?} is dirty because {:?} \
                              changed or was removed",
                             wp,
                             readable_blame);
                }
            }
        }
        return;
    }

    // If the source is dirty, the target will be dirty.
    assert!(!dirty_raw_nodes.contains_key(source));

    // Retrace the source -> target edges to def-ids and then create
    // an edge in the graph. Retracing may yield none if some of the
    // data happens to have been removed.
    if let Some(source_node) = retrace(tcx, source) {
        if let Some(target_node) = retrace(tcx, target) {
            let _task = tcx.dep_graph.in_task(target_node);
            tcx.dep_graph.read(source_node);
            if let DepNode::WorkProduct(ref wp) = *target {
                clean_work_products.insert(wp.clone());
            }
        } else {
            // As discussed in `decode_dep_graph` above, sometimes the
            // target cannot be recreated again, in which case we add
            // edges to go from `source` to the targets of `target`.
            extra_edges.extend(
                edges[target].iter().map(|t| (source, t)));
        }
    } else {
        // It's also possible that the source can't be created! But we
        // can ignore such cases, because (a) if `source` is a HIR
        // node, it would be considered dirty; and (b) in other cases,
        // there must be some input to this node that is clean, and so
        // we'll re-create the edges over in the case where target is
        // undefined.
    }
}