// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Code to save/load the dep-graph from files.

use rustc::dep_graph::DepNode;
use rustc::hir::def_id::DefId;
use rustc::hir::svh::Svh;
use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc_data_structures::fnv::{FnvHashSet, FnvHashMap};
use rustc_serialize::Decodable as RustcDecodable;
use rustc_serialize::opaque::Decoder;
use std::fs;
use std::path::Path;

use IncrementalHashesMap;
use ich::Fingerprint;
use super::data::*;
use super::directory::*;
use super::dirty_clean;
use super::hash::*;
use super::fs::*;
use super::file_format;

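/// The set of dep-nodes known to be dirty. Note that these are still keyed
/// by the previous session's `DefPathIndex` rather than by `DefId`, so that
/// nodes whose def-path no longer exists can be represented (see the
/// comments in `decode_dep_graph` below).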
pub type DirtyNodes = FnvHashSet<DepNode<DefPathIndex>>;

/// If we are in incremental mode, and a previous dep-graph exists,
/// then load up those nodes/edges that are still valid into the
/// dep-graph for this session. (This is expected to run very early in
/// compilation, before we've really done any work, though in practice
/// the exact timing doesn't matter all that much.) See `README.md` for
/// a more general overview.
pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                incremental_hashes_map: &IncrementalHashesMap) {
    if tcx.sess.opts.incremental.is_none() {
        return;
    }

    match prepare_session_directory(tcx) {
        Ok(true) => {
            // We successfully allocated a session directory and there is
            // something in it to load, so continue
        }
        Ok(false) => {
            // We successfully allocated a session directory, but there is no
            // dep-graph data in it to load (because this is the first
            // compilation session with this incr. comp. dir.)
            return
        }
        Err(()) => {
            // Something went wrong while trying to allocate the session
            // directory. Don't try to use it any further.
            return
        }
    }

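    // Loading runs under `in_ignore` so that any dep-graph accesses made
    // along the way (e.g. while hashing) are not attributed to a task; the
    // clean edges themselves are re-created explicitly via `in_task` in
    // `decode_dep_graph`.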
    let _ignore = tcx.dep_graph.in_ignore();
    load_dep_graph_if_exists(tcx, incremental_hashes_map);
}

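/// Loads and decodes both the dep-graph file and the work-products file
/// from the current session directory, bailing out quietly if either file
/// is absent and reporting decoding errors as warnings.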
fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                      incremental_hashes_map: &IncrementalHashesMap) {
    let dep_graph_path = dep_graph_path(tcx.sess);
    let dep_graph_data = match load_data(tcx.sess, &dep_graph_path) {
        Some(p) => p,
        None => return // no file
    };

    let work_products_path = work_products_path(tcx.sess);
    let work_products_data = match load_data(tcx.sess, &work_products_path) {
        Some(p) => p,
        None => return // no file
    };

    match decode_dep_graph(tcx, incremental_hashes_map, &dep_graph_data, &work_products_data) {
        Ok(dirty_nodes) => dirty_nodes,
        Err(err) => {
            tcx.sess.warn(
                &format!("decoding error in dep-graph from `{}` and `{}`: {}",
                         dep_graph_path.display(),
                         work_products_path.display(),
                         err));
        }
    }
}

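/// Reads the file at `path` via `file_format::read_file`, returning `None`
/// if the file is missing, was produced by an incompatible compiler
/// version, or could not be read. In each of those cases, the contents of
/// the session directory are cleared out as well.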
fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
    match file_format::read_file(path) {
        Ok(Some(data)) => return Some(data),
        Ok(None) => {
            // The file either didn't exist or was produced by an incompatible
            // compiler version. Neither is an error.
        }
        Err(err) => {
            sess.err(
                &format!("could not load dep-graph from `{}`: {}",
                         path.display(), err));
        }
    }

    if let Err(err) = delete_all_session_dir_contents(sess) {
        sess.err(&format!("could not clear incompatible incremental \
                           compilation session directory `{}`: {}",
                          path.display(), err));
    }

    None
}

/// Decode the dep graph and load the edges/nodes that are still clean
/// into `tcx.dep_graph`.
pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                  incremental_hashes_map: &IncrementalHashesMap,
                                  dep_graph_data: &[u8],
                                  work_products_data: &[u8])
                                  -> Result<(), String>
{
    // Decode the list of work_products
    let mut work_product_decoder = Decoder::new(work_products_data, 0);
    let work_products = <Vec<SerializedWorkProduct>>::decode(&mut work_product_decoder)?;

    // Deserialize the directory and dep-graph.
    let mut dep_graph_decoder = Decoder::new(dep_graph_data, 0);
    let prev_commandline_args_hash = u64::decode(&mut dep_graph_decoder)?;

    if prev_commandline_args_hash != tcx.sess.opts.dep_tracking_hash() {
        // We can't reuse the cache, purge it.
        debug!("decode_dep_graph: differing commandline arg hashes");
        for swp in work_products {
            delete_dirty_work_product(tcx, swp);
        }

        // No need to do any further work
        return Ok(());
    }

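    // The directory maps each `DefPathIndex` used in the serialized graph
    // back to the def-path it had in the previous session; `retrace` below
    // resolves those def-paths to `DefId`s in the current session, where
    // they still exist.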
    let directory = DefIdDirectory::decode(&mut dep_graph_decoder)?;
    let serialized_dep_graph = SerializedDepGraph::decode(&mut dep_graph_decoder)?;

    // Retrace the paths in the directory to find their current location (if any).
    let retraced = directory.retrace(tcx);

    // Compute the set of Hir nodes whose data has changed or which
    // have been removed. These are "raw" source nodes, which means
    // that they still use the original `DefPathIndex` values from the
    // encoding, rather than having been retraced to a `DefId`. The
    // reason is that this way we can include nodes that have been
    // removed (and which no longer have a `DefId` in the current
    // compilation).
    let dirty_raw_source_nodes = dirty_nodes(tcx,
                                             incremental_hashes_map,
                                             &serialized_dep_graph.hashes,
                                             &retraced);

    // Create a list of (raw-source-node ->
    // retraced-target-node) edges. In the process of retracing the
    // target nodes, we may discover that some of their def-paths no
    // longer exist, in which case there is no need to mark the
    // corresponding nodes as dirty (they are just not present). So
    // this list may be smaller than the original.
    //
    // Note though that in the common case the target nodes are
    // `DepNode::WorkProduct` instances, and those don't have a
    // def-id, so they will never be considered to not exist. Instead,
    // we do a secondary hashing step (later, in trans) when we know
    // the set of symbols that go into a work-product: if any symbols
    // have been removed (or added) the hash will be different and
    // we'll ignore the work-product then.
    let retraced_edges: Vec<_> =
        serialized_dep_graph.edges.iter()
                                  .filter_map(|&(ref raw_source_node, ref raw_target_node)| {
                                      retraced.map(raw_target_node)
                                              .map(|target_node| (raw_source_node, target_node))
                                  })
                                  .collect();

    // Compute which work-products have an input that has changed or
    // been removed. Put the dirty ones into a set.
    let mut dirty_target_nodes = FnvHashSet();
    for &(raw_source_node, ref target_node) in &retraced_edges {
        if dirty_raw_source_nodes.contains(raw_source_node) {
            if !dirty_target_nodes.contains(target_node) {
                dirty_target_nodes.insert(target_node.clone());

                if tcx.sess.opts.debugging_opts.incremental_info {
                    // It'd be nice to pretty-print these paths better than just
                    // using the `Debug` impls, but this will do for now.
                    println!("module {:?} is dirty because {:?} changed or was removed",
                             target_node,
                             raw_source_node.map_def(|&index| {
                                 Some(directory.def_path_string(tcx, index))
                             }).unwrap());
                }
            }
        }
    }

    // For work-products that are still clean, add their deps into the
    // graph. This is needed because later we will have to save this
    // back out again!
    let dep_graph = tcx.dep_graph.clone();
    for (raw_source_node, target_node) in retraced_edges {
        if dirty_target_nodes.contains(&target_node) {
            continue;
        }

        let source_node = retraced.map(raw_source_node).unwrap();

        debug!("decode_dep_graph: clean edge: {:?} -> {:?}", source_node, target_node);

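        // Opening a task for the target node and replaying a read of the
        // source node re-creates this clean edge in the new dep-graph.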
        let _task = dep_graph.in_task(target_node);
        dep_graph.read(source_node);
    }

    // Add in work-products that are still clean, and delete those that are
    // dirty.
    reconcile_work_products(tcx, work_products, &dirty_target_nodes);

    dirty_clean::check_dirty_clean_annotations(tcx, &dirty_raw_source_nodes, &retraced);

    load_prev_metadata_hashes(tcx,
                              &retraced,
                              &mut *incremental_hashes_map.prev_metadata_hashes.borrow_mut());
    Ok(())
}

/// Computes which of the original set of dep-nodes are dirty. The returned
/// set still refers to nodes by their `DefPathIndex` (see `DirtyNodes`
/// above), so that removed nodes can be represented too.
fn dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                         incremental_hashes_map: &IncrementalHashesMap,
                         serialized_hashes: &[SerializedHash],
                         retraced: &RetracedDefIdDirectory)
                         -> DirtyNodes {
    let mut hcx = HashContext::new(tcx, incremental_hashes_map);
    let mut dirty_nodes = FnvHashSet();

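    // A node is dirty if its def-path can no longer be retraced to a
    // `DefId` (i.e. it was removed) or if its current hash differs from
    // the hash recorded in the previous session.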
    for hash in serialized_hashes {
        if let Some(dep_node) = retraced.map(&hash.dep_node) {
            let current_hash = hcx.hash(&dep_node).unwrap();
            if current_hash == hash.hash {
                debug!("initial_dirty_nodes: {:?} is clean (hash={:?})",
                       dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
                       current_hash);
                continue;
            }
            debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}",
                   dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
                   current_hash,
                   hash.hash);
        } else {
            debug!("initial_dirty_nodes: {:?} is dirty as it was removed",
                   hash.dep_node);
        }

        dirty_nodes.insert(hash.dep_node.clone());
    }

    dirty_nodes
}

/// Go through the list of work-products produced in the previous run.
/// Delete any whose nodes have been found to be dirty or which are
/// otherwise no longer applicable.
fn reconcile_work_products<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                     work_products: Vec<SerializedWorkProduct>,
                                     dirty_target_nodes: &FnvHashSet<DepNode<DefId>>) {
    debug!("reconcile_work_products({:?})", work_products);
    for swp in work_products {
        if dirty_target_nodes.contains(&DepNode::WorkProduct(swp.id.clone())) {
            debug!("reconcile_work_products: dep-node for {:?} is dirty", swp);
            delete_dirty_work_product(tcx, swp);
        } else {
            let all_files_exist =
                swp.work_product
                   .saved_files
                   .iter()
                   .all(|&(_, ref file_name)| {
                       let path = in_incr_comp_dir_sess(tcx.sess, &file_name);
                       path.exists()
                   });
            if all_files_exist {
                debug!("reconcile_work_products: all files for {:?} exist", swp);
                tcx.dep_graph.insert_previous_work_product(&swp.id, swp.work_product);
            } else {
                debug!("reconcile_work_products: some file for {:?} does not exist", swp);
                delete_dirty_work_product(tcx, swp);
            }
        }
    }
}

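/// Deletes every file saved for a dirty work-product, reporting any
/// file-system errors as warnings rather than aborting.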
fn delete_dirty_work_product(tcx: TyCtxt,
                             swp: SerializedWorkProduct) {
    debug!("delete_dirty_work_product({:?})", swp);
    for &(_, ref file_name) in &swp.work_product.saved_files {
        let path = in_incr_comp_dir_sess(tcx.sess, file_name);
        match fs::remove_file(&path) {
            Ok(()) => { }
            Err(err) => {
                tcx.sess.warn(
                    &format!("file-system error deleting outdated file `{}`: {}",
                             path.display(), err));
            }
        }
    }
}

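/// Loads the metadata hashes recorded by the previous session and maps them
/// to current `DefId`s via `retraced`. Note that this only runs when
/// `-Z query-dep-graph` is enabled, so the data is presumably only needed
/// for testing and debugging.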
fn load_prev_metadata_hashes(tcx: TyCtxt,
                             retraced: &RetracedDefIdDirectory,
                             output: &mut FnvHashMap<DefId, Fingerprint>) {
    if !tcx.sess.opts.debugging_opts.query_dep_graph {
        return
    }

    debug!("load_prev_metadata_hashes() - Loading previous metadata hashes");

    let file_path = metadata_hash_export_path(tcx.sess);

    if !file_path.exists() {
        debug!("load_prev_metadata_hashes() - Couldn't find file containing \
                hashes at `{}`", file_path.display());
        return
    }

    debug!("load_prev_metadata_hashes() - File: {}", file_path.display());

    let data = match file_format::read_file(&file_path) {
        Ok(Some(data)) => data,
        Ok(None) => {
            debug!("load_prev_metadata_hashes() - File produced by incompatible \
                    compiler version: {}", file_path.display());
            return
        }
        Err(err) => {
            debug!("load_prev_metadata_hashes() - Error reading file `{}`: {}",
                   file_path.display(), err);
            return
        }
    };

    debug!("load_prev_metadata_hashes() - Decoding hashes");
    let mut decoder = Decoder::new(&data, 0);
    // The file is prefixed with the crate's SVH; it isn't needed here, so
    // decode past it and discard the value.
    let _ = Svh::decode(&mut decoder).unwrap();
    let serialized_hashes = SerializedMetadataHashes::decode(&mut decoder).unwrap();

    debug!("load_prev_metadata_hashes() - Mapping DefIds");

    assert_eq!(serialized_hashes.index_map.len(), serialized_hashes.hashes.len());
    for serialized_hash in serialized_hashes.hashes {
        let def_path_index = serialized_hashes.index_map[&serialized_hash.def_index];
        if let Some(def_id) = retraced.def_id(def_path_index) {
            let old = output.insert(def_id, serialized_hash.hash);
            assert!(old.is_none(), "already have hash for {:?}", def_id);
        }
    }

    debug!("load_prev_metadata_hashes() - successfully loaded {} hashes",
           serialized_hashes.index_map.len());
}