// Upstream rustc 1.9.0+dfsg1: src/librustc_incremental/persist/load.rs
1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 //! Code to save/load the dep-graph from files.
12
13 use calculate_svh::SvhCalculate;
14 use rbml::Error;
15 use rbml::opaque::Decoder;
16 use rustc::dep_graph::DepNode;
17 use rustc::hir::def_id::DefId;
18 use rustc::ty;
19 use rustc_data_structures::fnv::FnvHashSet;
20 use rustc_serialize::Decodable as RustcDecodable;
21 use std::io::Read;
22 use std::fs::File;
23 use std::path::Path;
24
25 use super::data::*;
26 use super::directory::*;
27 use super::dirty_clean;
28 use super::util::*;
29
// Set of nodes whose inputs have changed since the previous
// compilation; work they represent must be redone.
type DirtyNodes = FnvHashSet<DepNode<DefId>>;

// Edges (source, target) from the previous dep-graph whose source is
// known to still be clean, and which can therefore be replayed.
type CleanEdges = Vec<(DepNode<DefId>, DepNode<DefId>)>;
33
34 /// If we are in incremental mode, and a previous dep-graph exists,
35 /// then load up those nodes/edges that are still valid into the
36 /// dep-graph for this session. (This is assumed to be running very
37 /// early in compilation, before we've really done any work, but
38 /// actually it doesn't matter all that much.) See `README.md` for
39 /// more general overview.
40 pub fn load_dep_graph<'tcx>(tcx: &ty::TyCtxt<'tcx>) {
41 let _ignore = tcx.dep_graph.in_ignore();
42
43 if let Some(dep_graph) = dep_graph_path(tcx) {
44 // FIXME(#32754) lock file?
45 load_dep_graph_if_exists(tcx, &dep_graph);
46 dirty_clean::check_dirty_clean_annotations(tcx);
47 }
48 }
49
50 pub fn load_dep_graph_if_exists<'tcx>(tcx: &ty::TyCtxt<'tcx>, path: &Path) {
51 if !path.exists() {
52 return;
53 }
54
55 let mut data = vec![];
56 match
57 File::open(path)
58 .and_then(|mut file| file.read_to_end(&mut data))
59 {
60 Ok(_) => { }
61 Err(err) => {
62 tcx.sess.err(
63 &format!("could not load dep-graph from `{}`: {}",
64 path.display(), err));
65 return;
66 }
67 }
68
69 match decode_dep_graph(tcx, &data) {
70 Ok(dirty) => dirty,
71 Err(err) => {
72 bug!("decoding error in dep-graph from `{}`: {}", path.display(), err);
73 }
74 }
75 }
76
/// Decodes a serialized dep-graph from `data` and installs the parts
/// that are still valid into `tcx.dep_graph`.
///
/// Steps: (1) decode the def-id directory and serialized graph;
/// (2) retrace the directory paths to current def-ids; (3) compute the
/// initially dirty HIR nodes by re-hashing items; (4) drop edges
/// reachable from dirty nodes (fixed point); (5) replay the surviving
/// edges and clean nodes into the session dep-graph.
///
/// Returns `Err` only on a decoding failure; dirtiness is not an error.
pub fn decode_dep_graph<'tcx>(tcx: &ty::TyCtxt<'tcx>, data: &[u8])
                              -> Result<(), Error>
{
    // Deserialize the directory and dep-graph. Order matters: the
    // directory was written first, so it must be decoded first from
    // the shared decoder.
    let mut decoder = Decoder::new(data, 0);
    let directory = try!(DefIdDirectory::decode(&mut decoder));
    let serialized_dep_graph = try!(SerializedDepGraph::decode(&mut decoder));

    debug!("decode_dep_graph: directory = {:#?}", directory);
    debug!("decode_dep_graph: serialized_dep_graph = {:#?}", serialized_dep_graph);

    // Retrace the paths in the directory to find their current location (if any).
    // Items deleted since the last compilation simply fail to retrace.
    let retraced = directory.retrace(tcx);

    debug!("decode_dep_graph: retraced = {:#?}", retraced);

    // Compute the set of Hir nodes whose data has changed.
    let mut dirty_nodes =
        initial_dirty_nodes(tcx, &serialized_dep_graph.hashes, &retraced);

    debug!("decode_dep_graph: initial dirty_nodes = {:#?}", dirty_nodes);

    // Find all DepNodes reachable from that core set. This loop
    // iterates repeatedly over the list of edges whose source is not
    // known to be dirty (`clean_edges`). If it finds an edge whose
    // source is dirty, it removes it from that list and adds the
    // target to `dirty_nodes`. It stops when it reaches a fixed
    // point.
    let clean_edges = compute_clean_edges(&serialized_dep_graph.edges,
                                          &retraced,
                                          &mut dirty_nodes);

    // Add synthetic `foo->foo` edges for each clean node `foo` that
    // we had before. This is sort of a hack to create clean nodes in
    // the graph, since the existence of a node is a signal that the
    // work it represents need not be repeated.
    let clean_nodes =
        serialized_dep_graph.nodes
                            .iter()
                            .filter_map(|&node| retraced.map(node))
                            .filter(|node| !dirty_nodes.contains(node))
                            .map(|node| (node, node));

    // Add nodes and edges that are not dirty into our main graph.
    // `in_task` creates the target node; `read` records the edge
    // source -> target while that task is active.
    let dep_graph = tcx.dep_graph.clone();
    for (source, target) in clean_edges.into_iter().chain(clean_nodes) {
        let _task = dep_graph.in_task(target);
        dep_graph.read(source);

        debug!("decode_dep_graph: clean edge: {:?} -> {:?}", source, target);
    }

    Ok(())
}
131
132 fn initial_dirty_nodes<'tcx>(tcx: &ty::TyCtxt<'tcx>,
133 hashed_items: &[SerializedHash],
134 retraced: &RetracedDefIdDirectory)
135 -> DirtyNodes {
136 let mut items_removed = false;
137 let mut dirty_nodes = FnvHashSet();
138 for hashed_item in hashed_items {
139 match retraced.def_id(hashed_item.index) {
140 Some(def_id) => {
141 // FIXME(#32753) -- should we use a distinct hash here
142 let current_hash = tcx.calculate_item_hash(def_id);
143 debug!("initial_dirty_nodes: hash of {:?} is {:?}, was {:?}",
144 def_id, current_hash, hashed_item.hash);
145 if current_hash != hashed_item.hash {
146 dirty_nodes.insert(DepNode::Hir(def_id));
147 }
148 }
149 None => {
150 items_removed = true;
151 }
152 }
153 }
154
155 // If any of the items in the krate have changed, then we consider
156 // the meta-node `Krate` to be dirty, since that means something
157 // which (potentially) read the contents of every single item.
158 if items_removed || !dirty_nodes.is_empty() {
159 dirty_nodes.insert(DepNode::Krate);
160 }
161
162 dirty_nodes
163 }
164
165 fn compute_clean_edges(serialized_edges: &[(SerializedEdge)],
166 retraced: &RetracedDefIdDirectory,
167 dirty_nodes: &mut DirtyNodes)
168 -> CleanEdges {
169 // Build up an initial list of edges. Include an edge (source,
170 // target) if neither node has been removed. If the source has
171 // been removed, add target to the list of dirty nodes.
172 let mut clean_edges = Vec::with_capacity(serialized_edges.len());
173 for &(serialized_source, serialized_target) in serialized_edges {
174 if let Some(target) = retraced.map(serialized_target) {
175 if let Some(source) = retraced.map(serialized_source) {
176 clean_edges.push((source, target))
177 } else {
178 // source removed, target must be dirty
179 dirty_nodes.insert(target);
180 }
181 } else {
182 // target removed, ignore the edge
183 }
184 }
185
186 debug!("compute_clean_edges: dirty_nodes={:#?}", dirty_nodes);
187
188 // Propagate dirty marks by iterating repeatedly over
189 // `clean_edges`. If we find an edge `(source, target)` where
190 // `source` is dirty, add `target` to the list of dirty nodes and
191 // remove it. Keep doing this until we find no more dirty nodes.
192 let mut previous_size = 0;
193 while dirty_nodes.len() > previous_size {
194 debug!("compute_clean_edges: previous_size={}", previous_size);
195 previous_size = dirty_nodes.len();
196 let mut i = 0;
197 while i < clean_edges.len() {
198 if dirty_nodes.contains(&clean_edges[i].0) {
199 let (source, target) = clean_edges.swap_remove(i);
200 debug!("compute_clean_edges: dirty source {:?} -> {:?}",
201 source, target);
202 dirty_nodes.insert(target);
203 } else if dirty_nodes.contains(&clean_edges[i].1) {
204 let (source, target) = clean_edges.swap_remove(i);
205 debug!("compute_clean_edges: dirty target {:?} -> {:?}",
206 source, target);
207 } else {
208 i += 1;
209 }
210 }
211 }
212
213 clean_edges
214 }