]>
Commit | Line | Data |
---|---|---|
9a8d695b | 1 | use std::collections::{BTreeSet, HashMap, HashSet}; |
509c5b44 | 2 | use std::path::PathBuf; |
bd31c081 | 3 | use std::sync::{Arc, Mutex}; |
1e1412fd | 4 | |
b1b9b79c | 5 | use filetime::FileTime; |
cbf25a9b AC |
6 | use jobserver::Client; |
7 | ||
1c779ac5 | 8 | use crate::core::compiler::{self, compilation, Unit}; |
1f14fa31 | 9 | use crate::core::PackageId; |
04ddd4d0 | 10 | use crate::util::errors::{CargoResult, CargoResultExt}; |
e0bd9e23 | 11 | use crate::util::profile; |
1e1412fd | 12 | |
dae87a26 | 13 | use super::build_plan::BuildPlan; |
bd31c081 | 14 | use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts}; |
a40d3b03 | 15 | use super::fingerprint::Fingerprint; |
4e2e0191 | 16 | use super::job_queue::JobQueue; |
41579bad | 17 | use super::layout::Layout; |
e2219254 | 18 | use super::lto::Lto; |
e0bd9e23 | 19 | use super::unit_graph::UnitDep; |
ef425b77 | 20 | use super::{BuildContext, Compilation, CompileKind, CompileMode, Executor, FileFlavor}; |
9f5d9b81 | 21 | |
cf0bb13d | 22 | mod compilation_files; |
72e6b9d3 | 23 | use self::compilation_files::CompilationFiles; |
dae87a26 | 24 | pub use self::compilation_files::{Metadata, OutputFile}; |
cf0bb13d | 25 | |
/// Collection of all the stuff that is needed to perform a build.
///
/// This is the mutable state threaded through an entire compilation session;
/// it is created once per build via `Context::new` and consumed by `compile`.
pub struct Context<'a, 'cfg> {
    /// Mostly static information about the build task.
    pub bcx: &'a BuildContext<'a, 'cfg>,
    /// A large collection of information about the result of the entire compilation.
    pub compilation: Compilation<'cfg>,
    /// Output from build scripts, updated after each build script runs.
    pub build_script_outputs: Arc<Mutex<BuildScriptOutputs>>,
    /// Dependencies (like rerun-if-changed) declared by a build script.
    /// This is *only* populated from the output from previous runs.
    /// If the build script hasn't ever been run, then it must be run.
    pub build_explicit_deps: HashMap<Unit, BuildDeps>,
    /// Fingerprints used to detect if a unit is out-of-date.
    pub fingerprints: HashMap<Unit, Arc<Fingerprint>>,
    /// Cache of file mtimes to reduce filesystem hits.
    pub mtime_cache: HashMap<PathBuf, FileTime>,
    /// A set used to track which units have been compiled.
    /// A unit may appear in the job graph multiple times as a dependency of
    /// multiple packages, but it only needs to run once.
    pub compiled: HashSet<Unit>,
    /// Linking information for each `Unit`.
    /// See `build_map` for details.
    pub build_scripts: HashMap<Unit, Arc<BuildScripts>>,
    /// Job server client to manage concurrency with other processes.
    pub jobserver: Client,
    /// "Primary" packages are the ones the user selected on the command-line
    /// with `-p` flags. If no flags are specified, then it is the defaults
    /// based on the current directory and the default workspace members.
    primary_packages: HashSet<PackageId>,
    /// An abstraction of the files and directories that will be generated by
    /// the compilation. This is `None` until after `unit_dependencies` has
    /// been computed.
    files: Option<CompilationFiles<'a, 'cfg>>,
    /// A flag indicating whether pipelining is enabled for this compilation
    /// session. Pipelining largely only affects the edges of the dependency
    /// graph that we generate at the end, and otherwise it's pretty
    /// straightforward.
    pipelining: bool,
    /// A set of units which are compiling rlibs and are expected to produce
    /// metadata files in addition to the rlib itself. This is only filled in
    /// when `pipelining` above is enabled.
    rmeta_required: HashSet<Unit>,
    /// When we're in jobserver-per-rustc process mode, this keeps those
    /// jobserver clients for each Unit (which eventually becomes a rustc
    /// process).
    pub rustc_clients: HashMap<Unit, Client>,
    /// Map of the LTO-status of each unit. This indicates what sort of
    /// compilation is happening (only object, only bitcode, both, etc), and is
    /// precalculated early on.
    pub lto: HashMap<Unit, Lto>,
}
81 | ||
82 | impl<'a, 'cfg> Context<'a, 'cfg> { | |
e0bd9e23 | 83 | pub fn new(bcx: &'a BuildContext<'a, 'cfg>) -> CargoResult<Self> { |
c32e395c DO |
84 | // Load up the jobserver that we'll use to manage our parallelism. This |
85 | // is the same as the GNU make implementation of a jobserver, and | |
86 | // intentionally so! It's hoped that we can interact with GNU make and | |
87 | // all share the same jobserver. | |
88 | // | |
89 | // Note that if we don't have a jobserver in our environment then we | |
cab8640b AC |
90 | // create our own, and we create it with `n` tokens, but immediately |
91 | // acquire one, because one token is ourself, a running process. | |
e0bd9e23 | 92 | let jobserver = match bcx.config.jobserver_from_env() { |
c32e395c | 93 | Some(c) => c.clone(), |
cab8640b AC |
94 | None => { |
95 | let client = Client::new(bcx.build_config.jobs as usize) | |
96 | .chain_err(|| "failed to create jobserver")?; | |
97 | client.acquire_raw()?; | |
98 | client | |
99 | } | |
c32e395c DO |
100 | }; |
101 | ||
d7d8ca1e | 102 | let pipelining = bcx.config.build_config()?.pipelining.unwrap_or(true); |
127fdfeb | 103 | |
c32e395c DO |
104 | Ok(Self { |
105 | bcx, | |
e0bd9e23 | 106 | compilation: Compilation::new(bcx)?, |
bd31c081 | 107 | build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())), |
c32e395c | 108 | fingerprints: HashMap::new(), |
b1b9b79c | 109 | mtime_cache: HashMap::new(), |
9e779198 | 110 | compiled: HashSet::new(), |
5a953eb0 | 111 | build_scripts: HashMap::new(), |
7c97c5bf | 112 | build_explicit_deps: HashMap::new(), |
0247dc42 | 113 | jobserver, |
7691deb3 | 114 | primary_packages: HashSet::new(), |
cf0bb13d | 115 | files: None, |
127fdfeb | 116 | rmeta_required: HashSet::new(), |
6117f526 | 117 | rustc_clients: HashMap::new(), |
127fdfeb | 118 | pipelining, |
e2219254 | 119 | lto: HashMap::new(), |
c32e395c | 120 | }) |
4e2e0191 DO |
121 | } |
122 | ||
    /// Starts compilation, waits for it to finish, and returns information
    /// about the result of compilation.
    ///
    /// Consumes `self`; the returned `Compilation` holds everything collected
    /// from the build (binaries, tests, cfgs, env vars, doctest info, ...).
    pub fn compile(mut self, exec: &Arc<dyn Executor>) -> CargoResult<Compilation<'cfg>> {
        let mut queue = JobQueue::new(self.bcx);
        let mut plan = BuildPlan::new();
        let build_plan = self.bcx.build_config.build_plan;
        self.prepare_units()?;
        self.prepare()?;
        custom_build::build_map(&mut self)?;
        super::lto::generate(&mut self)?;
        // NOTE(review): "collistions" is a typo for "collisions" in the method
        // name (defined later in this impl); renaming requires updating both.
        self.check_collistions()?;

        for unit in &self.bcx.roots {
            // Build up a list of pending jobs, each of which represent
            // compiling a particular package. No actual work is executed as
            // part of this, that's all done next as part of the `execute`
            // function which will run everything in order with proper
            // parallelism.
            let force_rebuild = self.bcx.build_config.force_rebuild;
            super::compile(&mut self, &mut queue, &mut plan, unit, exec, force_rebuild)?;
        }

        // Now that we've got the full job queue and we've done all our
        // fingerprint analysis to determine what to run, bust all the memoized
        // fingerprint hashes to ensure that during the build they all get the
        // most up-to-date values. In theory we only need to bust hashes that
        // transitively depend on a dirty build script, but it shouldn't matter
        // that much for performance anyway.
        for fingerprint in self.fingerprints.values() {
            fingerprint.clear_memoized();
        }

        // Now that we've figured out everything that we're going to do, do it!
        queue.execute(&mut self, &mut plan)?;

        if build_plan {
            plan.set_inputs(self.build_plan_inputs()?);
            plan.output_plan(self.bcx.config);
        }

        // Collect the result of the build into `self.compilation`.
        for unit in &self.bcx.roots {
            // Collect tests and executables.
            for output in self.outputs(unit)?.iter() {
                if output.flavor == FileFlavor::DebugInfo || output.flavor == FileFlavor::Auxiliary
                {
                    continue;
                }

                let bindst = output.bin_dst();

                if unit.mode == CompileMode::Test {
                    self.compilation
                        .tests
                        .push((unit.clone(), output.path.clone()));
                } else if unit.target.is_executable() {
                    self.compilation
                        .binaries
                        .push((unit.clone(), bindst.clone()));
                }
            }

            // If the unit has a build script, add `OUT_DIR` to the
            // environment variables.
            if unit.target.is_lib() {
                for dep in &self.bcx.unit_graph[unit] {
                    if dep.unit.mode.is_run_custom_build() {
                        let out_dir = self
                            .files()
                            .build_script_out_dir(&dep.unit)
                            .display()
                            .to_string();
                        self.compilation
                            .extra_env
                            .entry(dep.unit.pkg.package_id())
                            .or_insert_with(Vec::new)
                            .push(("OUT_DIR".to_string(), out_dir));
                    }
                }
            }

            // Collect information for `rustdoc --test`.
            if unit.mode.is_doc_test() {
                let mut unstable_opts = false;
                let args = compiler::extern_args(&self, unit, &mut unstable_opts)?;
                self.compilation.to_doc_test.push(compilation::Doctest {
                    unit: unit.clone(),
                    args,
                    unstable_opts,
                    linker: self.bcx.linker(unit.kind),
                });
            }

            // Collect the enabled features.
            let feats = &unit.features;
            if !feats.is_empty() {
                self.compilation
                    .cfgs
                    .entry(unit.pkg.package_id())
                    .or_insert_with(|| {
                        feats
                            .iter()
                            .map(|feat| format!("feature=\"{}\"", feat))
                            .collect()
                    });
            }

            // Collect rustdocflags.
            let rustdocflags = self.bcx.rustdocflags_args(unit);
            if !rustdocflags.is_empty() {
                self.compilation
                    .rustdocflags
                    .entry(unit.pkg.package_id())
                    .or_insert_with(|| rustdocflags.to_vec());
            }

            super::output_depinfo(&mut self, unit)?;
        }

        // Fold build-script results (cfgs, env vars, native library search
        // paths) into the final `Compilation` as well.
        for (pkg_id, output) in self.build_script_outputs.lock().unwrap().iter() {
            self.compilation
                .cfgs
                .entry(pkg_id)
                .or_insert_with(HashSet::new)
                .extend(output.cfgs.iter().cloned());

            self.compilation
                .extra_env
                .entry(pkg_id)
                .or_insert_with(Vec::new)
                .extend(output.env.iter().cloned());

            for dir in output.library_paths.iter() {
                self.compilation.native_dirs.insert(dir.clone());
            }
        }
        Ok(self.compilation)
    }
261 | ||
282f238d | 262 | /// Returns the executable for the specified unit (if any). |
c85ed044 | 263 | pub fn get_executable(&mut self, unit: &Unit) -> CargoResult<Option<PathBuf>> { |
282f238d | 264 | for output in self.outputs(unit)?.iter() { |
eac3b66b | 265 | if output.flavor != FileFlavor::Normal { |
282f238d DW |
266 | continue; |
267 | } | |
268 | ||
5d09137b | 269 | let is_binary = unit.target.is_executable(); |
282f238d DW |
270 | let is_test = unit.mode.is_any_test() && !unit.mode.is_check(); |
271 | ||
272 | if is_binary || is_test { | |
b0a6c426 | 273 | return Ok(Option::Some(output.bin_dst().clone())); |
282f238d DW |
274 | } |
275 | } | |
c523ce60 | 276 | Ok(None) |
282f238d DW |
277 | } |
278 | ||
e0bd9e23 | 279 | pub fn prepare_units(&mut self) -> CargoResult<()> { |
77ee608d | 280 | let dest = self.bcx.profiles.get_dir_name(); |
86c459d4 | 281 | let host_layout = Layout::new(self.bcx.ws, None, &dest)?; |
593a02f2 | 282 | let mut targets = HashMap::new(); |
3fd28143 AC |
283 | for kind in self.bcx.build_config.requested_kinds.iter() { |
284 | if let CompileKind::Target(target) = *kind { | |
285 | let layout = Layout::new(self.bcx.ws, Some(target), &dest)?; | |
286 | targets.insert(target, layout); | |
287 | } | |
593a02f2 | 288 | } |
dae87a26 | 289 | self.primary_packages |
e0bd9e23 | 290 | .extend(self.bcx.roots.iter().map(|u| u.pkg.package_id())); |
4d7d0636 | 291 | |
9a8d695b EH |
292 | self.record_units_requiring_metadata(); |
293 | ||
e0bd9e23 | 294 | let files = CompilationFiles::new(self, host_layout, targets); |
4d7d0636 DO |
295 | self.files = Some(files); |
296 | Ok(()) | |
297 | } | |
298 | ||
3f7b09cc AC |
299 | /// Prepare this context, ensuring that all filesystem directories are in |
300 | /// place. | |
0863469c | 301 | pub fn prepare(&mut self) -> CargoResult<()> { |
3f7b09cc AC |
302 | let _p = profile::start("preparing layout"); |
303 | ||
cf0bb13d AK |
304 | self.files_mut() |
305 | .host | |
1e682848 | 306 | .prepare() |
0d44a826 | 307 | .chain_err(|| "couldn't prepare build directories")?; |
593a02f2 | 308 | for target in self.files.as_mut().unwrap().target.values_mut() { |
1e682848 AC |
309 | target |
310 | .prepare() | |
0d44a826 | 311 | .chain_err(|| "couldn't prepare build directories")?; |
3f7b09cc AC |
312 | } |
313 | ||
cf0bb13d | 314 | let files = self.files.as_ref().unwrap(); |
3fd28143 AC |
315 | for &kind in self |
316 | .bcx | |
317 | .build_config | |
318 | .requested_kinds | |
319 | .iter() | |
320 | .chain(Some(&CompileKind::Host)) | |
321 | { | |
322 | let layout = files.layout(kind); | |
323 | self.compilation | |
324 | .root_output | |
325 | .insert(kind, layout.dest().to_path_buf()); | |
326 | self.compilation | |
327 | .deps_output | |
328 | .insert(kind, layout.deps().to_path_buf()); | |
329 | } | |
3f7b09cc AC |
330 | Ok(()) |
331 | } | |
332 | ||
cf0bb13d AK |
333 | pub fn files(&self) -> &CompilationFiles<'a, 'cfg> { |
334 | self.files.as_ref().unwrap() | |
ff19a482 AC |
335 | } |
336 | ||
cf0bb13d AK |
337 | fn files_mut(&mut self) -> &mut CompilationFiles<'a, 'cfg> { |
338 | self.files.as_mut().unwrap() | |
d15aaa82 AC |
339 | } |
340 | ||
f7c91ba6 | 341 | /// Returns the filenames that the given unit will generate. |
c85ed044 | 342 | pub fn outputs(&self, unit: &Unit) -> CargoResult<Arc<Vec<OutputFile>>> { |
c32e395c | 343 | self.files.as_ref().unwrap().outputs(unit, self.bcx) |
1e1412fd AC |
344 | } |
345 | ||
    /// Direct dependencies for the given unit.
    ///
    /// Panics if `unit` is not a key of the unit graph (map indexing is used).
    pub fn unit_deps(&self, unit: &Unit) -> &[UnitDep] {
        &self.bcx.unit_graph[unit]
    }
44a7ee7d | 350 | |
2296af27 EH |
351 | /// Returns the RunCustomBuild Unit associated with the given Unit. |
352 | /// | |
353 | /// If the package does not have a build script, this returns None. | |
c85ed044 | 354 | pub fn find_build_script_unit(&self, unit: Unit) -> Option<Unit> { |
2296af27 EH |
355 | if unit.mode.is_run_custom_build() { |
356 | return Some(unit); | |
357 | } | |
e0bd9e23 | 358 | self.bcx.unit_graph[&unit] |
2296af27 EH |
359 | .iter() |
360 | .find(|unit_dep| { | |
361 | unit_dep.unit.mode.is_run_custom_build() | |
362 | && unit_dep.unit.pkg.package_id() == unit.pkg.package_id() | |
363 | }) | |
45d49579 | 364 | .map(|unit_dep| unit_dep.unit.clone()) |
2296af27 EH |
365 | } |
366 | ||
367 | /// Returns the metadata hash for the RunCustomBuild Unit associated with | |
368 | /// the given unit. | |
369 | /// | |
370 | /// If the package does not have a build script, this returns None. | |
c85ed044 | 371 | pub fn find_build_script_metadata(&self, unit: Unit) -> Option<Metadata> { |
2296af27 EH |
372 | let script_unit = self.find_build_script_unit(unit)?; |
373 | Some(self.get_run_build_script_metadata(&script_unit)) | |
374 | } | |
375 | ||
376 | /// Returns the metadata hash for a RunCustomBuild unit. | |
c85ed044 | 377 | pub fn get_run_build_script_metadata(&self, unit: &Unit) -> Metadata { |
2296af27 EH |
378 | assert!(unit.mode.is_run_custom_build()); |
379 | self.files() | |
7438770b | 380 | .metadata(unit) |
2296af27 EH |
381 | .expect("build script should always have hash") |
382 | } | |
383 | ||
    /// Returns whether `unit`'s package is one of the "primary" packages
    /// selected on the command line (see `primary_packages`).
    pub fn is_primary_package(&self, unit: &Unit) -> bool {
        self.primary_packages.contains(&unit.pkg.package_id())
    }
387 | ||
f7c91ba6 AR |
388 | /// Returns the list of filenames read by cargo to generate the `BuildContext` |
389 | /// (all `Cargo.toml`, etc.). | |
d8e43e81 | 390 | pub fn build_plan_inputs(&self) -> CargoResult<Vec<PathBuf>> { |
1f14fa31 | 391 | // Keep sorted for consistency. |
9a8d695b | 392 | let mut inputs = BTreeSet::new(); |
1f14fa31 | 393 | // Note: dev-deps are skipped if they are not present in the unit graph. |
e0bd9e23 | 394 | for unit in self.bcx.unit_graph.keys() { |
9a8d695b | 395 | inputs.insert(unit.pkg.manifest_path().to_path_buf()); |
44a7ee7d | 396 | } |
9a8d695b | 397 | Ok(inputs.into_iter().collect()) |
44a7ee7d | 398 | } |
    /// Warns (without erroring) when distinct units would write the same
    /// output filename, hardlink, or `--out-dir` export path.
    ///
    /// NOTE(review): "collistions" is a typo for "collisions"; renaming would
    /// also require updating the call site in `compile`.
    fn check_collistions(&self) -> CargoResult<()> {
        // Maps each output path to the first unit seen producing it; a second
        // insert for the same path reveals a collision.
        let mut output_collisions = HashMap::new();
        let describe_collision = |unit: &Unit, other_unit: &Unit, path: &PathBuf| -> String {
            format!(
                "The {} target `{}` in package `{}` has the same output \
                 filename as the {} target `{}` in package `{}`.\n\
                 Colliding filename is: {}\n",
                unit.target.kind().description(),
                unit.target.name(),
                unit.pkg.package_id(),
                other_unit.target.kind().description(),
                other_unit.target.name(),
                other_unit.pkg.package_id(),
                path.display()
            )
        };
        let suggestion =
            "Consider changing their names to be unique or compiling them separately.\n\
             This may become a hard error in the future; see \
             <https://github.com/rust-lang/cargo/issues/6313>.";
        let rustdoc_suggestion =
            "This is a known bug where multiple crates with the same name use\n\
             the same path; see <https://github.com/rust-lang/cargo/issues/6313>.";
        // Emits a shell warning; the wording differs depending on whether the
        // target names match (user fixable) or not (likely a Cargo bug).
        let report_collision = |unit: &Unit,
                                other_unit: &Unit,
                                path: &PathBuf,
                                suggestion: &str|
         -> CargoResult<()> {
            if unit.target.name() == other_unit.target.name() {
                self.bcx.config.shell().warn(format!(
                    "output filename collision.\n\
                     {}\
                     The targets should have unique names.\n\
                     {}",
                    describe_collision(unit, other_unit, path),
                    suggestion
                ))
            } else {
                self.bcx.config.shell().warn(format!(
                    "output filename collision.\n\
                     {}\
                     The output filenames should be unique.\n\
                     {}\n\
                     If this looks unexpected, it may be a bug in Cargo. Please file a bug report at\n\
                     https://github.com/rust-lang/cargo/issues/ with as much information as you\n\
                     can provide.\n\
                     {} running on `{}` target `{}`\n\
                     First unit: {:?}\n\
                     Second unit: {:?}",
                    describe_collision(unit, other_unit, path),
                    suggestion,
                    crate::version(),
                    self.bcx.host_triple(),
                    self.bcx.target_data.short_name(&unit.kind),
                    unit,
                    other_unit))
            }
        };

        // Build-script runs don't produce user-visible artifacts, so skip them.
        let mut keys = self
            .bcx
            .unit_graph
            .keys()
            .filter(|unit| !unit.mode.is_run_custom_build())
            .collect::<Vec<_>>();
        // Sort for consistent error messages.
        keys.sort_unstable();
        for unit in keys {
            for output in self.outputs(unit)?.iter() {
                if let Some(other_unit) = output_collisions.insert(output.path.clone(), unit) {
                    if unit.mode.is_doc() {
                        // See https://github.com/rust-lang/rust/issues/56169
                        // and https://github.com/rust-lang/rust/issues/61378
                        report_collision(unit, other_unit, &output.path, rustdoc_suggestion)?;
                    } else {
                        report_collision(unit, other_unit, &output.path, suggestion)?;
                    }
                }
                if let Some(hardlink) = output.hardlink.as_ref() {
                    if let Some(other_unit) = output_collisions.insert(hardlink.clone(), unit) {
                        report_collision(unit, other_unit, hardlink, suggestion)?;
                    }
                }
                if let Some(ref export_path) = output.export_path {
                    if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) {
                        self.bcx.config.shell().warn(format!(
                            "`--out-dir` filename collision.\n\
                             {}\
                             The exported filenames should be unique.\n\
                             {}",
                            describe_collision(unit, other_unit, export_path),
                            suggestion
                        ))?;
                    }
                }
            }
        }
        Ok(())
    }
499 | ||
9a8d695b EH |
500 | /// Records the list of units which are required to emit metadata. |
501 | /// | |
502 | /// Units which depend only on the metadata of others requires the others to | |
503 | /// actually produce metadata, so we'll record that here. | |
504 | fn record_units_requiring_metadata(&mut self) { | |
e0bd9e23 | 505 | for (key, deps) in self.bcx.unit_graph.iter() { |
9a8d695b | 506 | for dep in deps { |
1f14fa31 | 507 | if self.only_requires_rmeta(key, &dep.unit) { |
45d49579 | 508 | self.rmeta_required.insert(dep.unit.clone()); |
9a8d695b EH |
509 | } |
510 | } | |
511 | } | |
512 | } | |
513 | ||
127fdfeb AC |
514 | /// Returns whether when `parent` depends on `dep` if it only requires the |
515 | /// metadata file from `dep`. | |
c85ed044 | 516 | pub fn only_requires_rmeta(&self, parent: &Unit, dep: &Unit) -> bool { |
127fdfeb AC |
517 | // this is only enabled when pipelining is enabled |
518 | self.pipelining | |
519 | // We're only a candidate for requiring an `rmeta` file if we | |
520 | // ourselves are building an rlib, | |
0c51d71c | 521 | && !parent.requires_upstream_objects() |
127fdfeb AC |
522 | && parent.mode == CompileMode::Build |
523 | // Our dependency must also be built as an rlib, otherwise the | |
524 | // object code must be useful in some fashion | |
0c51d71c | 525 | && !dep.requires_upstream_objects() |
127fdfeb AC |
526 | && dep.mode == CompileMode::Build |
527 | } | |
528 | ||
    /// Returns whether when `unit` is built whether it should emit metadata as
    /// well because some compilations rely on that.
    pub fn rmeta_required(&self, unit: &Unit) -> bool {
        // Either some pipelined consumer recorded this unit in `rmeta_required`
        // (see `record_units_requiring_metadata`), or the unstable `-Ztimings`
        // option is active — NOTE(review): presumably timings tracks rmeta
        // completion times; confirm against the timings implementation.
        self.rmeta_required.contains(unit) || self.bcx.config.cli_unstable().timings.is_some()
    }
ec80cf90 MR |
534 | |
535 | pub fn new_jobserver(&mut self) -> CargoResult<Client> { | |
536 | let tokens = self.bcx.build_config.jobs as usize; | |
537 | let client = Client::new(tokens).chain_err(|| "failed to create jobserver")?; | |
538 | ||
539 | // Drain the client fully | |
540 | for i in 0..tokens { | |
de6c78c2 MR |
541 | client.acquire_raw().chain_err(|| { |
542 | format!( | |
543 | "failed to fully drain {}/{} token from jobserver at startup", | |
544 | i, tokens, | |
545 | ) | |
546 | })?; | |
ec80cf90 MR |
547 | } |
548 | ||
549 | Ok(client) | |
550 | } | |
c25c1ae1 | 551 | } |