// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use rustc::dep_graph::DepNode;
use rustc::hir::def_id::DefId;
use rustc::hir::svh::Svh;
use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc_data_structures::fnv::FnvHashMap;
use rustc_serialize::Encodable as RustcEncodable;
use rustc_serialize::opaque::Encoder;
use std::hash::Hash;
use std::io::{self, Cursor, Write};
use std::fs::{self, File};
use std::path::PathBuf;

use IncrementalHashesMap;
use ich::Fingerprint;

use super::data::*;
use super::directory::*;
use super::hash::*;
use super::preds::*;
use super::fs::*;
use super::dirty_clean;
use super::file_format;
use calculate_svh::hasher::IchHasher;
54a0048b | 34 | |
9e0c209e SL |
35 | pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, |
36 | incremental_hashes_map: &IncrementalHashesMap, | |
37 | svh: Svh) { | |
5bcae85e | 38 | debug!("save_dep_graph()"); |
54a0048b | 39 | let _ignore = tcx.dep_graph.in_ignore(); |
5bcae85e SL |
40 | let sess = tcx.sess; |
41 | if sess.opts.incremental.is_none() { | |
42 | return; | |
43 | } | |
9e0c209e | 44 | |
5bcae85e SL |
45 | let mut builder = DefIdDirectoryBuilder::new(tcx); |
46 | let query = tcx.dep_graph.query(); | |
9e0c209e | 47 | let mut hcx = HashContext::new(tcx, incremental_hashes_map); |
5bcae85e | 48 | let preds = Predecessors::new(&query, &mut hcx); |
9e0c209e SL |
49 | let mut current_metadata_hashes = FnvHashMap(); |
50 | ||
51 | // IMPORTANT: We are saving the metadata hashes *before* the dep-graph, | |
52 | // since metadata-encoding might add new entries to the | |
53 | // DefIdDirectory (which is saved in the dep-graph file). | |
5bcae85e | 54 | save_in(sess, |
9e0c209e SL |
55 | metadata_hash_export_path(sess), |
56 | |e| encode_metadata_hashes(tcx, | |
57 | svh, | |
58 | &preds, | |
59 | &mut builder, | |
60 | &mut current_metadata_hashes, | |
61 | e)); | |
5bcae85e | 62 | save_in(sess, |
9e0c209e SL |
63 | dep_graph_path(sess), |
64 | |e| encode_dep_graph(&preds, &mut builder, e)); | |
65 | ||
66 | let prev_metadata_hashes = incremental_hashes_map.prev_metadata_hashes.borrow(); | |
67 | dirty_clean::check_dirty_clean_metadata(tcx, | |
68 | &*prev_metadata_hashes, | |
69 | ¤t_metadata_hashes); | |
a7813a04 | 70 | } |
54a0048b | 71 | |
9e0c209e SL |
72 | pub fn save_work_products(sess: &Session) { |
73 | if sess.opts.incremental.is_none() { | |
74 | return; | |
75 | } | |
76 | ||
5bcae85e SL |
77 | debug!("save_work_products()"); |
78 | let _ignore = sess.dep_graph.in_ignore(); | |
9e0c209e | 79 | let path = work_products_path(sess); |
5bcae85e SL |
80 | save_in(sess, path, |e| encode_work_products(sess, e)); |
81 | } | |
a7813a04 | 82 | |
9e0c209e | 83 | fn save_in<F>(sess: &Session, path_buf: PathBuf, encode: F) |
5bcae85e SL |
84 | where F: FnOnce(&mut Encoder) -> io::Result<()> |
85 | { | |
9e0c209e | 86 | debug!("save: storing data in {}", path_buf.display()); |
54a0048b | 87 | |
a7813a04 | 88 | // delete the old dep-graph, if any |
9e0c209e SL |
89 | // Note: It's important that we actually delete the old file and not just |
90 | // truncate and overwrite it, since it might be a shared hard-link, the | |
91 | // underlying data of which we don't want to modify | |
a7813a04 XL |
92 | if path_buf.exists() { |
93 | match fs::remove_file(&path_buf) { | |
9e0c209e SL |
94 | Ok(()) => { |
95 | debug!("save: remove old file"); | |
96 | } | |
54a0048b | 97 | Err(err) => { |
5bcae85e SL |
98 | sess.err(&format!("unable to delete old dep-graph at `{}`: {}", |
99 | path_buf.display(), | |
100 | err)); | |
54a0048b SL |
101 | return; |
102 | } | |
103 | } | |
a7813a04 | 104 | } |
54a0048b | 105 | |
a7813a04 XL |
106 | // generate the data in a memory buffer |
107 | let mut wr = Cursor::new(Vec::new()); | |
9e0c209e | 108 | file_format::write_file_header(&mut wr).unwrap(); |
5bcae85e SL |
109 | match encode(&mut Encoder::new(&mut wr)) { |
110 | Ok(()) => {} | |
a7813a04 | 111 | Err(err) => { |
5bcae85e SL |
112 | sess.err(&format!("could not encode dep-graph to `{}`: {}", |
113 | path_buf.display(), | |
114 | err)); | |
a7813a04 XL |
115 | return; |
116 | } | |
117 | } | |
118 | ||
119 | // write the data out | |
120 | let data = wr.into_inner(); | |
5bcae85e | 121 | match File::create(&path_buf).and_then(|mut file| file.write_all(&data)) { |
9e0c209e SL |
122 | Ok(_) => { |
123 | debug!("save: data written to disk successfully"); | |
124 | } | |
a7813a04 | 125 | Err(err) => { |
5bcae85e SL |
126 | sess.err(&format!("failed to write dep-graph to `{}`: {}", |
127 | path_buf.display(), | |
128 | err)); | |
a7813a04 | 129 | return; |
54a0048b SL |
130 | } |
131 | } | |
132 | } | |
133 | ||
5bcae85e SL |
134 | pub fn encode_dep_graph(preds: &Predecessors, |
135 | builder: &mut DefIdDirectoryBuilder, | |
136 | encoder: &mut Encoder) | |
137 | -> io::Result<()> { | |
138 | // First encode the commandline arguments hash | |
139 | let tcx = builder.tcx(); | |
9e0c209e | 140 | tcx.sess.opts.dep_tracking_hash().encode(encoder)?; |
5bcae85e SL |
141 | |
142 | // Create a flat list of (Input, WorkProduct) edges for | |
143 | // serialization. | |
144 | let mut edges = vec![]; | |
145 | for (&target, sources) in &preds.inputs { | |
146 | match *target { | |
147 | DepNode::MetaData(ref def_id) => { | |
148 | // Metadata *targets* are always local metadata nodes. We handle | |
149 | // those in `encode_metadata_hashes`, which comes later. | |
150 | assert!(def_id.is_local()); | |
151 | continue; | |
152 | } | |
153 | _ => (), | |
154 | } | |
155 | let target = builder.map(target); | |
156 | for &source in sources { | |
157 | let source = builder.map(source); | |
158 | edges.push((source, target.clone())); | |
159 | } | |
160 | } | |
54a0048b | 161 | |
a7813a04 | 162 | // Create the serialized dep-graph. |
54a0048b | 163 | let graph = SerializedDepGraph { |
5bcae85e SL |
164 | edges: edges, |
165 | hashes: preds.hashes | |
166 | .iter() | |
167 | .map(|(&dep_node, &hash)| { | |
168 | SerializedHash { | |
169 | dep_node: builder.map(dep_node), | |
170 | hash: hash, | |
171 | } | |
172 | }) | |
173 | .collect(), | |
54a0048b SL |
174 | }; |
175 | ||
176 | debug!("graph = {:#?}", graph); | |
177 | ||
178 | // Encode the directory and then the graph data. | |
9e0c209e SL |
179 | builder.directory().encode(encoder)?; |
180 | graph.encode(encoder)?; | |
54a0048b SL |
181 | |
182 | Ok(()) | |
183 | } | |
184 | ||
5bcae85e | 185 | pub fn encode_metadata_hashes(tcx: TyCtxt, |
9e0c209e | 186 | svh: Svh, |
5bcae85e SL |
187 | preds: &Predecessors, |
188 | builder: &mut DefIdDirectoryBuilder, | |
c30ab7b3 | 189 | current_metadata_hashes: &mut FnvHashMap<DefId, Fingerprint>, |
5bcae85e SL |
190 | encoder: &mut Encoder) |
191 | -> io::Result<()> { | |
5bcae85e SL |
192 | // For each `MetaData(X)` node where `X` is local, accumulate a |
193 | // hash. These are the metadata items we export. Downstream | |
194 | // crates will want to see a hash that tells them whether we might | |
195 | // have changed the metadata for a given item since they last | |
196 | // compiled. | |
197 | // | |
198 | // (I initially wrote this with an iterator, but it seemed harder to read.) | |
9e0c209e SL |
199 | let mut serialized_hashes = SerializedMetadataHashes { |
200 | hashes: vec![], | |
201 | index_map: FnvHashMap() | |
202 | }; | |
203 | ||
204 | let mut def_id_hashes = FnvHashMap(); | |
205 | ||
5bcae85e SL |
206 | for (&target, sources) in &preds.inputs { |
207 | let def_id = match *target { | |
208 | DepNode::MetaData(def_id) => { | |
209 | assert!(def_id.is_local()); | |
210 | def_id | |
211 | } | |
212 | _ => continue, | |
213 | }; | |
a7813a04 | 214 | |
9e0c209e SL |
215 | let mut def_id_hash = |def_id: DefId| -> u64 { |
216 | *def_id_hashes.entry(def_id) | |
217 | .or_insert_with(|| { | |
218 | let index = builder.add(def_id); | |
219 | let path = builder.lookup_def_path(index); | |
220 | path.deterministic_hash(tcx) | |
221 | }) | |
222 | }; | |
223 | ||
a7813a04 XL |
224 | // To create the hash for each item `X`, we don't hash the raw |
225 | // bytes of the metadata (though in principle we | |
226 | // could). Instead, we walk the predecessors of `MetaData(X)` | |
227 | // from the dep-graph. This corresponds to all the inputs that | |
228 | // were read to construct the metadata. To create the hash for | |
229 | // the metadata, we hash (the hash of) all of those inputs. | |
5bcae85e SL |
230 | debug!("save: computing metadata hash for {:?}", def_id); |
231 | ||
232 | // Create a vector containing a pair of (source-id, hash). | |
233 | // The source-id is stored as a `DepNode<u64>`, where the u64 | |
234 | // is the det. hash of the def-path. This is convenient | |
235 | // because we can sort this to get a stable ordering across | |
236 | // compilations, even if the def-ids themselves have changed. | |
c30ab7b3 | 237 | let mut hashes: Vec<(DepNode<u64>, Fingerprint)> = sources.iter() |
5bcae85e SL |
238 | .map(|dep_node| { |
239 | let hash_dep_node = dep_node.map_def(|&def_id| Some(def_id_hash(def_id))).unwrap(); | |
240 | let hash = preds.hashes[dep_node]; | |
241 | (hash_dep_node, hash) | |
242 | }) | |
243 | .collect(); | |
244 | ||
245 | hashes.sort(); | |
c30ab7b3 | 246 | let mut state = IchHasher::new(); |
5bcae85e SL |
247 | hashes.hash(&mut state); |
248 | let hash = state.finish(); | |
249 | ||
250 | debug!("save: metadata hash for {:?} is {}", def_id, hash); | |
251 | serialized_hashes.hashes.push(SerializedMetadataHash { | |
252 | def_index: def_id.index, | |
253 | hash: hash, | |
254 | }); | |
255 | } | |
a7813a04 | 256 | |
9e0c209e SL |
257 | if tcx.sess.opts.debugging_opts.query_dep_graph { |
258 | for serialized_hash in &serialized_hashes.hashes { | |
259 | let def_id = DefId::local(serialized_hash.def_index); | |
260 | ||
261 | // Store entry in the index_map | |
262 | let def_path_index = builder.add(def_id); | |
263 | serialized_hashes.index_map.insert(def_id.index, def_path_index); | |
264 | ||
265 | // Record hash in current_metadata_hashes | |
266 | current_metadata_hashes.insert(def_id, serialized_hash.hash); | |
267 | } | |
268 | ||
269 | debug!("save: stored index_map (len={}) for serialized hashes", | |
270 | serialized_hashes.index_map.len()); | |
271 | } | |
272 | ||
a7813a04 | 273 | // Encode everything. |
9e0c209e SL |
274 | svh.encode(encoder)?; |
275 | serialized_hashes.encode(encoder)?; | |
a7813a04 XL |
276 | |
277 | Ok(()) | |
278 | } | |
5bcae85e SL |
279 | |
280 | pub fn encode_work_products(sess: &Session, encoder: &mut Encoder) -> io::Result<()> { | |
281 | let work_products: Vec<_> = sess.dep_graph | |
282 | .work_products() | |
283 | .iter() | |
284 | .map(|(id, work_product)| { | |
285 | SerializedWorkProduct { | |
286 | id: id.clone(), | |
287 | work_product: work_product.clone(), | |
288 | } | |
289 | }) | |
290 | .collect(); | |
291 | ||
292 | work_products.encode(encoder) | |
293 | } |