]>
Commit | Line | Data |
---|---|---|
0a29b90c FG |
1 | //! # Interact with the compiler |
2 | //! | |
3 | //! If you consider [`ops::cargo_compile::compile`] as a `rustc` driver but on | |
4 | //! the Cargo side, then this module is, by that analogy, Cargo's `rustc_interface`. | |
5 | //! It contains all the interaction between Cargo and the rustc compiler, | |
6 | //! from preparing the context for the entire build process, to scheduling | |
7 | //! and executing each unit of work (e.g. running `rustc`), to managing and | |
8 | //! caching the output artifact of a build. | |
9 | //! | |
10 | //! However, it hasn't yet exposed a clear definition of each phase or session, | |
11 | //! like what rustc has done[^1]. Also, no one knows if Cargo really needs that. | |
12 | //! To be pragmatic, here we list a handful of items you may want to learn: | |
13 | //! | |
781aab86 | 14 | //! * [`BuildContext`] is a static context containing all information you need |
0a29b90c FG |
15 | //! before a build gets started. |
16 | //! * [`Context`] is the center of the world, coordinating a running build and | |
17 | //! collecting information from it. | |
18 | //! * [`custom_build`] is the home of build script executions and output parsing. | |
19 | //! * [`fingerprint`] not only defines but also executes a set of rules to | |
20 | //! determine if a re-compile is needed. | |
21 | //! * [`job_queue`] is where the parallelism, job scheduling, and communication | |
781aab86 | 22 | //! machinery happen between Cargo and the compiler. |
0a29b90c FG |
23 | //! * [`layout`] defines and manages output artifacts of a build in the filesystem. |
24 | //! * [`unit_dependencies`] is for building a dependency graph for compilation | |
25 | //! from a result of dependency resolution. | |
26 | //! * [`Unit`] contains sufficient information to build something, usually | |
27 | //! turning into a compiler invocation in a later phase. | |
28 | //! | |
29 | //! [^1]: Maybe [`-Zbuild-plan`](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#build-plan) | |
30 | //! was designed to serve that purpose but still [in flux](https://github.com/rust-lang/cargo/issues/7614). | |
31 | //! | |
32 | //! [`ops::cargo_compile::compile`]: crate::ops::compile | |
33 | ||
34 | pub mod artifact; | |
35 | mod build_config; | |
36 | pub(crate) mod build_context; | |
37 | mod build_plan; | |
38 | mod compilation; | |
39 | mod compile_kind; | |
40 | pub(crate) mod context; | |
41 | mod crate_type; | |
42 | mod custom_build; | |
43 | pub(crate) mod fingerprint; | |
44 | pub mod future_incompat; | |
45 | pub(crate) mod job_queue; | |
46 | pub(crate) mod layout; | |
47 | mod links; | |
48 | mod lto; | |
49 | mod output_depinfo; | |
50 | pub mod rustdoc; | |
51 | pub mod standard_lib; | |
52 | mod timings; | |
53 | mod unit; | |
54 | pub mod unit_dependencies; | |
55 | pub mod unit_graph; | |
56 | ||
57 | use std::collections::{HashMap, HashSet}; | |
58 | use std::env; | |
59 | use std::ffi::{OsStr, OsString}; | |
60 | use std::fmt::Display; | |
61 | use std::fs::{self, File}; | |
62 | use std::io::{BufRead, Write}; | |
63 | use std::path::{Path, PathBuf}; | |
64 | use std::sync::Arc; | |
65 | ||
66 | use anyhow::{Context as _, Error}; | |
67 | use lazycell::LazyCell; | |
add651ee | 68 | use tracing::{debug, trace}; |
0a29b90c FG |
69 | |
70 | pub use self::build_config::{BuildConfig, CompileMode, MessageFormat, TimingOutput}; | |
71 | pub use self::build_context::{ | |
72 | BuildContext, FileFlavor, FileType, RustDocFingerprint, RustcTargetData, TargetInfo, | |
73 | }; | |
74 | use self::build_plan::BuildPlan; | |
75 | pub use self::compilation::{Compilation, Doctest, UnitOutput}; | |
76 | pub use self::compile_kind::{CompileKind, CompileTarget}; | |
77 | pub use self::context::{Context, Metadata}; | |
78 | pub use self::crate_type::CrateType; | |
fe692bf9 | 79 | pub use self::custom_build::LinkArgTarget; |
0a29b90c FG |
80 | pub use self::custom_build::{BuildOutput, BuildScriptOutputs, BuildScripts}; |
81 | pub(crate) use self::fingerprint::DirtyReason; | |
82 | pub use self::job_queue::Freshness; | |
83 | use self::job_queue::{Job, JobQueue, JobState, Work}; | |
84 | pub(crate) use self::layout::Layout; | |
85 | pub use self::lto::Lto; | |
86 | use self::output_depinfo::output_depinfo; | |
87 | use self::unit_graph::UnitDep; | |
88 | use crate::core::compiler::future_incompat::FutureIncompatReport; | |
89 | pub use crate::core::compiler::unit::{Unit, UnitInterner}; | |
90 | use crate::core::manifest::TargetSourcePath; | |
91 | use crate::core::profiles::{PanicStrategy, Profile, Strip}; | |
92 | use crate::core::{Feature, PackageId, Target, Verbosity}; | |
93 | use crate::util::errors::{CargoResult, VerboseError}; | |
94 | use crate::util::interning::InternedString; | |
95 | use crate::util::machine_message::{self, Message}; | |
ed00b5ec FG |
96 | use crate::util::toml::schema::TomlDebugInfo; |
97 | use crate::util::toml::schema::TomlTrimPaths; | |
0a29b90c FG |
98 | use crate::util::{add_path_args, internal, iter_join_onto, profile}; |
99 | use cargo_util::{paths, ProcessBuilder, ProcessError}; | |
100 | use rustfix::diagnostics::Applicability; | |
101 | ||
/// Flag passed to `rustdoc` so generated docs record which crate version they
/// document; appended by [`append_crate_version_flag`] unless the user already
/// supplied it (see [`crate_version_flag_already_present`]).
const RUSTDOC_CRATE_VERSION_FLAG: &str = "--crate-version";
0a29b90c FG |
104 | /// A glorified callback for executing calls to rustc. Rather than calling rustc |
105 | /// directly, we'll use an `Executor`, giving clients an opportunity to intercept | |
106 | /// the build calls. | |
107 | pub trait Executor: Send + Sync + 'static { | |
108 | /// Called after a rustc process invocation is prepared up-front for a given | |
109 | /// unit of work (may still be modified for runtime-known dependencies, when | |
110 | /// the work is actually executed). | |
111 | fn init(&self, _cx: &Context<'_, '_>, _unit: &Unit) {} | |
112 | ||
113 | /// In case of an `Err`, Cargo will not continue with the build process for | |
114 | /// this package. | |
115 | fn exec( | |
116 | &self, | |
117 | cmd: &ProcessBuilder, | |
118 | id: PackageId, | |
119 | target: &Target, | |
120 | mode: CompileMode, | |
121 | on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>, | |
122 | on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>, | |
123 | ) -> CargoResult<()>; | |
124 | ||
125 | /// Queried when queuing each unit of work. If it returns true, then the | |
126 | /// unit will always be rebuilt, independent of whether it needs to be. | |
127 | fn force_rebuild(&self, _unit: &Unit) -> bool { | |
128 | false | |
129 | } | |
130 | } | |
131 | ||
/// A `DefaultExecutor` calls rustc without doing anything else. It is Cargo's
/// default behaviour.
#[derive(Copy, Clone)]
pub struct DefaultExecutor;
137 | impl Executor for DefaultExecutor { | |
138 | fn exec( | |
139 | &self, | |
140 | cmd: &ProcessBuilder, | |
141 | _id: PackageId, | |
142 | _target: &Target, | |
143 | _mode: CompileMode, | |
144 | on_stdout_line: &mut dyn FnMut(&str) -> CargoResult<()>, | |
145 | on_stderr_line: &mut dyn FnMut(&str) -> CargoResult<()>, | |
146 | ) -> CargoResult<()> { | |
147 | cmd.exec_with_streaming(on_stdout_line, on_stderr_line, false) | |
148 | .map(drop) | |
149 | } | |
150 | } | |
151 | ||
/// Builds up and enqueue a list of pending jobs onto the `job` queue.
///
/// Starting from the `unit`, this function recursively calls itself to build
/// all jobs for dependencies of the `unit`. Each of these jobs represents
/// compiling a particular package.
///
/// Note that **no actual work is executed as part of this**, that's all done
/// next as part of [`JobQueue::execute`] function which will run everything
/// in order with proper parallelism.
fn compile<'cfg>(
    cx: &mut Context<'_, 'cfg>,
    jobs: &mut JobQueue<'cfg>,
    plan: &mut BuildPlan,
    unit: &Unit,
    exec: &Arc<dyn Executor>,
    force_rebuild: bool,
) -> CargoResult<()> {
    let bcx = cx.bcx;
    let build_plan = bcx.build_config.build_plan;
    // Each unit is processed at most once; `cx.compiled` remembers what we've
    // already enqueued so the recursion terminates on shared dependencies.
    if !cx.compiled.insert(unit.clone()) {
        return Ok(());
    }

    // Build up the work to be done to compile this unit, enqueuing it once
    // we've got everything constructed.
    let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name()));
    fingerprint::prepare_init(cx, unit)?;

    let job = if unit.mode.is_run_custom_build() {
        custom_build::prepare(cx, unit)?
    } else if unit.mode.is_doc_test() {
        // We run these targets later, so this is just a no-op for now.
        Job::new_fresh()
    } else if build_plan {
        // `--build-plan` mode: always emit the invocation, never check freshness.
        Job::new_dirty(rustc(cx, unit, &exec.clone())?, None)
    } else {
        let force = exec.force_rebuild(unit) || force_rebuild;
        let mut job = fingerprint::prepare_target(cx, unit, force)?;
        job.before(if job.freshness().is_dirty() {
            // Dirty: actually invoke rustdoc/rustc for this unit.
            let work = if unit.mode.is_doc() || unit.mode.is_doc_scrape() {
                rustdoc(cx, unit)?
            } else {
                rustc(cx, unit, exec)?
            };
            work.then(link_targets(cx, unit, false)?)
        } else {
            // We always replay the output cache,
            // since it might contain future-incompat-report messages
            let work = replay_output_cache(
                unit.pkg.package_id(),
                PathBuf::from(unit.pkg.manifest_path()),
                &unit.target,
                cx.files().message_cache_path(unit),
                cx.bcx.build_config.message_format,
                unit.show_warnings(bcx.config),
            );
            // Need to link targets on both the dirty and fresh.
            work.then(link_targets(cx, unit, true)?)
        });

        job
    };
    jobs.enqueue(cx, unit, job)?;
    drop(p);

    // Be sure to compile all dependencies of this target as well.
    let deps = Vec::from(cx.unit_deps(unit)); // Create vec due to mutable borrow.
    for dep in deps {
        compile(cx, jobs, plan, &dep.unit, exec, false)?;
    }
    if build_plan {
        plan.add(cx, unit)?;
    }

    Ok(())
}
228 | ||
229 | /// Generates the warning message used when fallible doc-scrape units fail, | |
230 | /// either for rustdoc or rustc. | |
231 | fn make_failed_scrape_diagnostic( | |
232 | cx: &Context<'_, '_>, | |
233 | unit: &Unit, | |
234 | top_line: impl Display, | |
235 | ) -> String { | |
236 | let manifest_path = unit.pkg.manifest_path(); | |
237 | let relative_manifest_path = manifest_path | |
238 | .strip_prefix(cx.bcx.ws.root()) | |
239 | .unwrap_or(&manifest_path); | |
240 | ||
241 | format!( | |
242 | "\ | |
243 | {top_line} | |
244 | Try running with `--verbose` to see the error message. | |
245 | If an example should not be scanned, then consider adding `doc-scrape-examples = false` to its `[[example]]` definition in {}", | |
246 | relative_manifest_path.display() | |
247 | ) | |
248 | } | |
249 | ||
/// Creates a unit of work invoking `rustc` for building the `unit`.
///
/// Everything the closure needs is captured by value up-front (while we still
/// hold `cx`); the returned [`Work`] later runs on the job queue without any
/// access to the `Context`.
fn rustc(cx: &mut Context<'_, '_>, unit: &Unit, exec: &Arc<dyn Executor>) -> CargoResult<Work> {
    let mut rustc = prepare_rustc(cx, unit)?;
    let build_plan = cx.bcx.build_config.build_plan;

    let name = unit.pkg.name();
    let buildkey = unit.buildkey();

    let outputs = cx.outputs(unit)?;
    let root = cx.files().out_dir(unit);

    // Prepare the native lib state (extra `-L` and `-l` flags).
    let build_script_outputs = Arc::clone(&cx.build_script_outputs);
    let current_id = unit.pkg.package_id();
    let manifest_path = PathBuf::from(unit.pkg.manifest_path());
    let build_scripts = cx.build_scripts.get(unit).cloned();

    // If we are a binary and the package also contains a library, then we
    // don't pass the `-l` flags.
    let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib());

    // Dep-info file name mirrors the output file naming: with the extra
    // metadata hash when this unit uses one, plain crate name otherwise.
    let dep_info_name = if cx.files().use_extra_filename(unit) {
        format!(
            "{}-{}.d",
            unit.target.crate_name(),
            cx.files().metadata(unit)
        )
    } else {
        format!("{}.d", unit.target.crate_name())
    };
    let rustc_dep_info_loc = root.join(dep_info_name);
    let dep_info_loc = fingerprint::dep_info_loc(cx, unit);

    let mut output_options = OutputOptions::new(cx, unit);
    let package_id = unit.pkg.package_id();
    let target = Target::clone(&unit.target);
    let mode = unit.mode;

    // Give the executor a chance to observe/modify the prepared invocation.
    exec.init(cx, unit);
    let exec = exec.clone();

    let root_output = cx.files().host_dest().to_path_buf();
    let target_dir = cx.bcx.ws.target_dir().into_path_unlocked();
    let pkg_root = unit.pkg.root().to_path_buf();
    let cwd = rustc
        .get_cwd()
        .unwrap_or_else(|| cx.bcx.config.cwd())
        .to_path_buf();
    let fingerprint_dir = cx.files().fingerprint_dir(unit);
    let script_metadata = cx.find_build_script_metadata(unit);
    let is_local = unit.is_local();
    let artifact = unit.artifact;

    // Fallible doc-scrape units get their diagnostics suppressed (unless
    // `--verbose`) and replaced with a single summary warning on failure.
    let hide_diagnostics_for_scrape_unit = cx.bcx.unit_can_fail_for_docscraping(unit)
        && !matches!(cx.bcx.config.shell().verbosity(), Verbosity::Verbose);
    let failed_scrape_diagnostic = hide_diagnostics_for_scrape_unit.then(|| {
        // If this unit is needed for doc-scraping, then we generate a diagnostic that
        // describes the set of reverse-dependencies that cause the unit to be needed.
        let target_desc = unit.target.description_named();
        let mut for_scrape_units = cx
            .bcx
            .scrape_units_have_dep_on(unit)
            .into_iter()
            .map(|unit| unit.target.description_named())
            .collect::<Vec<_>>();
        for_scrape_units.sort();
        let for_scrape_units = for_scrape_units.join(", ");
        make_failed_scrape_diagnostic(cx, unit, format_args!("failed to check {target_desc} in package `{name}` as a prerequisite for scraping examples from: {for_scrape_units}"))
    });
    if hide_diagnostics_for_scrape_unit {
        output_options.show_diagnostics = false;
    }

    return Ok(Work::new(move |state| {
        // Artifacts are in a different location than typical units,
        // hence we must assure the crate- and target-dependent
        // directory is present.
        if artifact.is_true() {
            paths::create_dir_all(&root)?;
        }

        // Only at runtime have we discovered what the extra -L and -l
        // arguments are for native libraries, so we process those here. We
        // also need to be sure to add any -L paths for our plugins to the
        // dynamic library load path as a plugin's dynamic library may be
        // located somewhere in there.
        // Finally, if custom environment variables have been produced by
        // previous build scripts, we include them in the rustc invocation.
        if let Some(build_scripts) = build_scripts {
            let script_outputs = build_script_outputs.lock().unwrap();
            if !build_plan {
                add_native_deps(
                    &mut rustc,
                    &script_outputs,
                    &build_scripts,
                    pass_l_flag,
                    &target,
                    current_id,
                )?;
                add_plugin_deps(&mut rustc, &script_outputs, &build_scripts, &root_output)?;
            }
            add_custom_flags(&mut rustc, &script_outputs, script_metadata)?;
        }

        for output in outputs.iter() {
            // If there is both an rmeta and rlib, rustc will prefer to use the
            // rlib, even if it is older. Therefore, we must delete the rlib to
            // force using the new rmeta.
            if output.path.extension() == Some(OsStr::new("rmeta")) {
                let dst = root.join(&output.path).with_extension("rlib");
                if dst.exists() {
                    paths::remove_file(&dst)?;
                }
            }

            // Some linkers do not remove the executable, but truncate and modify it.
            // That results in the old hard-link being modified even after renamed.
            // We delete the old artifact here to prevent this behavior from confusing users.
            // See rust-lang/cargo#8348.
            if output.hardlink.is_some() && output.path.exists() {
                // Best-effort: a failed delete is only logged, not fatal.
                _ = paths::remove_file(&output.path).map_err(|e| {
                    tracing::debug!(
                        "failed to delete previous output file `{:?}`: {e:?}",
                        output.path
                    );
                });
            }
        }

        fn verbose_if_simple_exit_code(err: Error) -> Error {
            // If a signal on unix (`code == None`) or an abnormal termination
            // on Windows (codes like `0xC0000409`), don't hide the error details.
            match err
                .downcast_ref::<ProcessError>()
                .as_ref()
                .and_then(|perr| perr.code)
            {
                Some(n) if cargo_util::is_simple_exit_code(n) => VerboseError::new(err).into(),
                _ => err,
            }
        }

        state.running(&rustc);
        let timestamp = paths::set_invocation_time(&fingerprint_dir)?;
        if build_plan {
            // Build-plan mode: record the invocation instead of running it.
            state.build_plan(buildkey, rustc.clone(), outputs.clone());
        } else {
            let result = exec
                .exec(
                    &rustc,
                    package_id,
                    &target,
                    mode,
                    &mut |line| on_stdout_line(state, line, package_id, &target),
                    &mut |line| {
                        on_stderr_line(
                            state,
                            line,
                            package_id,
                            &manifest_path,
                            &target,
                            &mut output_options,
                        )
                    },
                )
                .map_err(verbose_if_simple_exit_code)
                .with_context(|| {
                    // adapted from rustc_errors/src/lib.rs
                    let warnings = match output_options.warnings_seen {
                        0 => String::new(),
                        1 => "; 1 warning emitted".to_string(),
                        count => format!("; {} warnings emitted", count),
                    };
                    let errors = match output_options.errors_seen {
                        0 => String::new(),
                        1 => " due to previous error".to_string(),
                        count => format!(" due to {} previous errors", count),
                    };
                    let name = descriptive_pkg_name(&name, &target, &mode);
                    format!("could not compile {name}{errors}{warnings}")
                });

            if let Err(e) = result {
                // Surface the single summary warning for suppressed
                // doc-scrape diagnostics before propagating the failure.
                if let Some(diagnostic) = failed_scrape_diagnostic {
                    state.warning(diagnostic)?;
                }

                return Err(e);
            }

            // Exec should never return with success *and* generate an error.
            debug_assert_eq!(output_options.errors_seen, 0);
        }

        if rustc_dep_info_loc.exists() {
            fingerprint::translate_dep_info(
                &rustc_dep_info_loc,
                &dep_info_loc,
                &cwd,
                &pkg_root,
                &target_dir,
                &rustc,
                // Do not track source files in the fingerprint for registry dependencies.
                is_local,
            )
            .with_context(|| {
                internal(format!(
                    "could not parse/generate dep info at: {}",
                    rustc_dep_info_loc.display()
                ))
            })?;
            // This mtime shift allows Cargo to detect if a source file was
            // modified in the middle of the build.
            paths::set_file_time_no_err(dep_info_loc, timestamp);
        }

        Ok(())
    }));

    // Add all relevant `-L` and `-l` flags from dependencies (now calculated and
    // present in `state`) to the command provided.
    fn add_native_deps(
        rustc: &mut ProcessBuilder,
        build_script_outputs: &BuildScriptOutputs,
        build_scripts: &BuildScripts,
        pass_l_flag: bool,
        target: &Target,
        current_id: PackageId,
    ) -> CargoResult<()> {
        for key in build_scripts.to_link.iter() {
            let output = build_script_outputs.get(key.1).ok_or_else(|| {
                internal(format!(
                    "couldn't find build script output for {}/{}",
                    key.0, key.1
                ))
            })?;
            for path in output.library_paths.iter() {
                rustc.arg("-L").arg(path);
            }

            // `-l` flags only come from this package's own build script.
            if key.0 == current_id {
                if pass_l_flag {
                    for name in output.library_links.iter() {
                        rustc.arg("-l").arg(name);
                    }
                }
            }

            for (lt, arg) in &output.linker_args {
                // There was an unintentional change where cdylibs were
                // allowed to be passed via transitive dependencies. This
                // clause should have been kept in the `if` block above. For
                // now, continue allowing it for cdylib only.
                // See https://github.com/rust-lang/cargo/issues/9562
                if lt.applies_to(target) && (key.0 == current_id || *lt == LinkArgTarget::Cdylib) {
                    rustc.arg("-C").arg(format!("link-arg={}", arg));
                }
            }
        }
        Ok(())
    }
}
512 | ||
/// Link the compiled target (often of form `foo-{metadata_hash}`) to the
/// final target. This must happen during both "Fresh" and "Compile".
///
/// Also emits the machine-readable `Artifact` JSON message when JSON message
/// output is enabled.
fn link_targets(cx: &mut Context<'_, '_>, unit: &Unit, fresh: bool) -> CargoResult<Work> {
    let bcx = cx.bcx;
    let outputs = cx.outputs(unit)?;
    let export_dir = cx.files().export_dir();
    let package_id = unit.pkg.package_id();
    let manifest_path = PathBuf::from(unit.pkg.manifest_path());
    let profile = unit.profile.clone();
    let unit_mode = unit.mode;
    let features = unit.features.iter().map(|s| s.to_string()).collect();
    let json_messages = bcx.build_config.emit_json();
    let executable = cx.get_executable(unit)?;
    let mut target = Target::clone(&unit.target);
    if let TargetSourcePath::Metabuild = target.src_path() {
        // Give it something to serialize.
        let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir());
        target.set_src_path(TargetSourcePath::Path(path));
    }

    Ok(Work::new(move |state| {
        // If we're a "root crate", e.g., the target of this compilation, then we
        // hard link our outputs out of the `deps` directory into the directory
        // above. This means that `cargo build` will produce binaries in
        // `target/debug` which one probably expects.
        let mut destinations = vec![];
        for output in outputs.iter() {
            let src = &output.path;
            // This may have been a `cargo rustc` command which changes the
            // output, so the source may not actually exist.
            if !src.exists() {
                continue;
            }
            // No hardlink destination: report the `deps` path itself.
            let Some(dst) = output.hardlink.as_ref() else {
                destinations.push(src.clone());
                continue;
            };
            destinations.push(dst.clone());
            paths::link_or_copy(src, dst)?;
            if let Some(ref path) = output.export_path {
                let export_dir = export_dir.as_ref().unwrap();
                paths::create_dir_all(export_dir)?;

                paths::link_or_copy(src, path)?;
            }
        }

        if json_messages {
            // Map the profile's debuginfo setting onto the stable JSON form.
            let debuginfo = match profile.debuginfo.into_inner() {
                TomlDebugInfo::None => machine_message::ArtifactDebuginfo::Int(0),
                TomlDebugInfo::Limited => machine_message::ArtifactDebuginfo::Int(1),
                TomlDebugInfo::Full => machine_message::ArtifactDebuginfo::Int(2),
                TomlDebugInfo::LineDirectivesOnly => {
                    machine_message::ArtifactDebuginfo::Named("line-directives-only")
                }
                TomlDebugInfo::LineTablesOnly => {
                    machine_message::ArtifactDebuginfo::Named("line-tables-only")
                }
            };
            let art_profile = machine_message::ArtifactProfile {
                opt_level: profile.opt_level.as_str(),
                debuginfo: Some(debuginfo),
                debug_assertions: profile.debug_assertions,
                overflow_checks: profile.overflow_checks,
                test: unit_mode.is_any_test(),
            };

            let msg = machine_message::Artifact {
                package_id,
                manifest_path,
                target: &target,
                profile: art_profile,
                features,
                filenames: destinations,
                executable,
                fresh,
            }
            .to_json_string();
            state.stdout(msg)?;
        }
        Ok(())
    }))
}
596 | ||
597 | // For all plugin dependencies, add their -L paths (now calculated and present | |
598 | // in `build_script_outputs`) to the dynamic library load path for the command | |
599 | // to execute. | |
600 | fn add_plugin_deps( | |
601 | rustc: &mut ProcessBuilder, | |
602 | build_script_outputs: &BuildScriptOutputs, | |
603 | build_scripts: &BuildScripts, | |
604 | root_output: &Path, | |
605 | ) -> CargoResult<()> { | |
606 | let var = paths::dylib_path_envvar(); | |
607 | let search_path = rustc.get_env(var).unwrap_or_default(); | |
608 | let mut search_path = env::split_paths(&search_path).collect::<Vec<_>>(); | |
609 | for (pkg_id, metadata) in &build_scripts.plugins { | |
610 | let output = build_script_outputs | |
611 | .get(*metadata) | |
612 | .ok_or_else(|| internal(format!("couldn't find libs for plugin dep {}", pkg_id)))?; | |
613 | search_path.append(&mut filter_dynamic_search_path( | |
614 | output.library_paths.iter(), | |
615 | root_output, | |
616 | )); | |
617 | } | |
618 | let search_path = paths::join_paths(&search_path, var)?; | |
619 | rustc.env(var, &search_path); | |
620 | Ok(()) | |
621 | } | |
622 | ||
623 | // Determine paths to add to the dynamic search path from -L entries | |
624 | // | |
625 | // Strip off prefixes like "native=" or "framework=" and filter out directories | |
626 | // **not** inside our output directory since they are likely spurious and can cause | |
627 | // clashes with system shared libraries (issue #3366). | |
628 | fn filter_dynamic_search_path<'a, I>(paths: I, root_output: &Path) -> Vec<PathBuf> | |
629 | where | |
630 | I: Iterator<Item = &'a PathBuf>, | |
631 | { | |
632 | let mut search_path = vec![]; | |
633 | for dir in paths { | |
781aab86 FG |
634 | let dir = match dir.to_str().and_then(|s| s.split_once("=")) { |
635 | Some(("native" | "crate" | "dependency" | "framework" | "all", path)) => path.into(), | |
636 | _ => dir.clone(), | |
0a29b90c FG |
637 | }; |
638 | if dir.starts_with(&root_output) { | |
639 | search_path.push(dir); | |
640 | } else { | |
641 | debug!( | |
642 | "Not including path {} in runtime library search path because it is \ | |
643 | outside target root {}", | |
644 | dir.display(), | |
645 | root_output.display() | |
646 | ); | |
647 | } | |
648 | } | |
649 | search_path | |
650 | } | |
651 | ||
/// Prepares flags and environments we can compute for a `rustc` invocation
/// before the job queue starts compiling any unit.
///
/// This builds a static view of the invocation. Flags depending on the
/// completion of other units will be added later in runtime, such as flags
/// from build scripts.
fn prepare_rustc(cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<ProcessBuilder> {
    let is_primary = cx.is_primary_package(unit);
    let is_workspace = cx.bcx.ws.is_member(&unit.pkg);

    let mut base = cx
        .compilation
        .rustc_process(unit, is_primary, is_workspace)?;

    if is_primary {
        base.env("CARGO_PRIMARY_PACKAGE", "1");
    }

    // Tests and benches get a scratch directory advertised via env var.
    if unit.target.is_test() || unit.target.is_bench() {
        let tmp = cx.files().layout(unit.kind).prepare_tmp()?;
        base.env("CARGO_TARGET_TMPDIR", tmp.display().to_string());
    }

    base.inherit_jobserver(&cx.jobserver);
    build_base_args(cx, &mut base, unit)?;
    build_deps_args(&mut base, cx, unit)?;
    add_cap_lints(cx.bcx, unit, &mut base);
    // User-supplied RUSTFLAGS come after Cargo's own arguments.
    base.args(cx.bcx.rustflags_args(unit));
    if cx.bcx.config.cli_unstable().binary_dep_depinfo {
        base.arg("-Z").arg("binary-dep-depinfo");
    }
    Ok(base)
}
685 | ||
/// Prepares flags and environments we can compute for a `rustdoc` invocation
/// before the job queue starts compiling any unit.
///
/// This builds a static view of the invocation. Flags depending on the
/// completion of other units will be added later in runtime, such as flags
/// from build scripts.
fn prepare_rustdoc(cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<ProcessBuilder> {
    let bcx = cx.bcx;
    // script_metadata is not needed here, it is only for tests.
    let mut rustdoc = cx.compilation.rustdoc_process(unit, None)?;
    rustdoc.inherit_jobserver(&cx.jobserver);
    let crate_name = unit.target.crate_name();
    rustdoc.arg("--crate-name").arg(&crate_name);
    add_path_args(bcx.ws, unit, &mut rustdoc);
    add_cap_lints(bcx, unit, &mut rustdoc);

    if let CompileKind::Target(target) = unit.kind {
        rustdoc.arg("--target").arg(target.rustc_target());
    }
    let doc_dir = cx.files().out_dir(unit);
    rustdoc.arg("-o").arg(&doc_dir);
    rustdoc.args(&features_args(unit));
    rustdoc.args(&check_cfg_args(cx, unit));

    add_error_format_and_color(cx, &mut rustdoc);
    add_allow_features(cx, &mut rustdoc);

    rustdoc.args(unit.pkg.manifest().lint_rustflags());
    if let Some(args) = cx.bcx.extra_args_for(unit) {
        rustdoc.args(args);
    }

    let metadata = cx.metadata_for_doc_units[unit];
    rustdoc.arg("-C").arg(format!("metadata={}", metadata));

    if unit.mode.is_doc_scrape() {
        debug_assert!(cx.bcx.scrape_units.contains(unit));

        if unit.target.is_test() {
            rustdoc.arg("--scrape-tests");
        }

        // Scraping flags are unstable rustdoc options.
        rustdoc.arg("-Zunstable-options");

        rustdoc
            .arg("--scrape-examples-output-path")
            .arg(scrape_output_path(cx, unit)?);

        // Only scrape example for items from crates in the workspace, to reduce generated file size
        for pkg in cx.bcx.ws.members() {
            let names = pkg
                .targets()
                .iter()
                .map(|target| target.crate_name())
                .collect::<HashSet<_>>();
            for name in names {
                rustdoc.arg("--scrape-examples-target-crate").arg(name);
            }
        }
    }

    // Consuming scraped examples also requires unstable options.
    if should_include_scrape_units(cx.bcx, unit) {
        rustdoc.arg("-Zunstable-options");
    }

    build_deps_args(&mut rustdoc, cx, unit)?;
    rustdoc::add_root_urls(cx, unit, &mut rustdoc)?;

    rustdoc.args(bcx.rustdocflags_args(unit));

    if !crate_version_flag_already_present(&rustdoc) {
        append_crate_version_flag(unit, &mut rustdoc);
    }

    Ok(rustdoc)
}
762 | ||
/// Creates a unit of work invoking `rustdoc` for documenting the `unit`.
///
/// The heavy lifting of assembling the command line happens up front (and in
/// `prepare_rustdoc`); the returned [`Work`] closure captures everything it
/// needs by value/`Arc` so it can run later on the job queue, after the
/// unit's dependencies have finished.
fn rustdoc(cx: &mut Context<'_, '_>, unit: &Unit) -> CargoResult<Work> {
    let mut rustdoc = prepare_rustdoc(cx, unit)?;

    let crate_name = unit.target.crate_name();
    let doc_dir = cx.files().out_dir(unit);
    // Create the documentation directory ahead of time as rustdoc currently has
    // a bug where concurrent invocations will race to create this directory if
    // it doesn't already exist.
    paths::create_dir_all(&doc_dir)?;

    // Clone/copy everything the `move` closure below needs, since it may run
    // on another thread and cannot borrow from `cx`/`unit`.
    let target_desc = unit.target.description_named();
    let name = unit.pkg.name();
    let build_script_outputs = Arc::clone(&cx.build_script_outputs);
    let package_id = unit.pkg.package_id();
    let manifest_path = PathBuf::from(unit.pkg.manifest_path());
    let target = Target::clone(&unit.target);
    let mut output_options = OutputOptions::new(cx, unit);
    let script_metadata = cx.find_build_script_metadata(unit);
    // Map of (unit metadata -> scraped-example output path) for every scrape
    // unit, gathered eagerly. Which entries are actually passed to rustdoc is
    // decided at run time, because some scrape units may fail (see below).
    let scrape_outputs = if should_include_scrape_units(cx.bcx, unit) {
        Some(
            cx.bcx
                .scrape_units
                .iter()
                .map(|unit| Ok((cx.files().metadata(unit), scrape_output_path(cx, unit)?)))
                .collect::<CargoResult<HashMap<_, _>>>()?,
        )
    } else {
        None
    };

    let failed_scrape_units = Arc::clone(&cx.failed_scrape_units);
    // Units that can fail for doc-scraping shouldn't spam diagnostics; hide
    // them unless the user asked for verbose output.
    let hide_diagnostics_for_scrape_unit = cx.bcx.unit_can_fail_for_docscraping(unit)
        && !matches!(cx.bcx.config.shell().verbosity(), Verbosity::Verbose);
    let failed_scrape_diagnostic = hide_diagnostics_for_scrape_unit.then(|| {
        make_failed_scrape_diagnostic(
            cx,
            unit,
            format_args!("failed to scan {target_desc} in package `{name}` for example code usage"),
        )
    });
    if hide_diagnostics_for_scrape_unit {
        output_options.show_diagnostics = false;
    }

    Ok(Work::new(move |state| {
        // Build-script outputs only exist once dependencies have run, so the
        // extra `--cfg`/env flags they produce are added here, at execution time.
        add_custom_flags(
            &mut rustdoc,
            &build_script_outputs.lock().unwrap(),
            script_metadata,
        )?;

        // Add the output of scraped examples to the rustdoc command.
        // This action must happen after the unit's dependencies have finished,
        // because some of those deps may be Docscrape units which have failed.
        // So we dynamically determine which `--with-examples` flags to pass here.
        if let Some(scrape_outputs) = scrape_outputs {
            let failed_scrape_units = failed_scrape_units.lock().unwrap();
            for (metadata, output_path) in &scrape_outputs {
                if !failed_scrape_units.contains(metadata) {
                    rustdoc.arg("--with-examples").arg(output_path);
                }
            }
        }

        let crate_dir = doc_dir.join(&crate_name);
        if crate_dir.exists() {
            // Remove output from a previous build. This ensures that stale
            // files for removed items are removed.
            debug!("removing pre-existing doc directory {:?}", crate_dir);
            paths::remove_dir_all(crate_dir)?;
        }
        state.running(&rustdoc);

        // Stream rustdoc's output through Cargo's JSON diagnostic handling so
        // messages are rendered/cached consistently with rustc's.
        let result = rustdoc
            .exec_with_streaming(
                &mut |line| on_stdout_line(state, line, package_id, &target),
                &mut |line| {
                    on_stderr_line(
                        state,
                        line,
                        package_id,
                        &manifest_path,
                        &target,
                        &mut output_options,
                    )
                },
                false,
            )
            .with_context(|| format!("could not document `{}`", name));

        if let Err(e) = result {
            // Surface the (normally suppressed) scrape-failure hint before
            // propagating the error, so the user knows why docs failed.
            if let Some(diagnostic) = failed_scrape_diagnostic {
                state.warning(diagnostic)?;
            }

            return Err(e);
        }

        Ok(())
    }))
}
865 | ||
866 | // The --crate-version flag could have already been passed in RUSTDOCFLAGS | |
867 | // or as an extra compiler argument for rustdoc | |
868 | fn crate_version_flag_already_present(rustdoc: &ProcessBuilder) -> bool { | |
869 | rustdoc.get_args().any(|flag| { | |
870 | flag.to_str() | |
871 | .map_or(false, |flag| flag.starts_with(RUSTDOC_CRATE_VERSION_FLAG)) | |
872 | }) | |
873 | } | |
874 | ||
875 | fn append_crate_version_flag(unit: &Unit, rustdoc: &mut ProcessBuilder) { | |
876 | rustdoc | |
877 | .arg(RUSTDOC_CRATE_VERSION_FLAG) | |
878 | .arg(unit.pkg.version().to_string()); | |
879 | } | |
880 | ||
881 | /// Adds [`--cap-lints`] to the command to execute. | |
882 | /// | |
883 | /// [`--cap-lints`]: https://doc.rust-lang.org/nightly/rustc/lints/levels.html#capping-lints | |
884 | fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit, cmd: &mut ProcessBuilder) { | |
885 | // If this is an upstream dep we don't want warnings from, turn off all | |
886 | // lints. | |
887 | if !unit.show_warnings(bcx.config) { | |
888 | cmd.arg("--cap-lints").arg("allow"); | |
889 | ||
890 | // If this is an upstream dep but we *do* want warnings, make sure that they | |
891 | // don't fail compilation. | |
892 | } else if !unit.is_local() { | |
893 | cmd.arg("--cap-lints").arg("warn"); | |
894 | } | |
895 | } | |
896 | ||
897 | /// Forwards [`-Zallow-features`] if it is set for cargo. | |
898 | /// | |
899 | /// [`-Zallow-features`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#allow-features | |
900 | fn add_allow_features(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder) { | |
901 | if let Some(allow) = &cx.bcx.config.cli_unstable().allow_features { | |
902 | let mut arg = String::from("-Zallow-features="); | |
903 | let _ = iter_join_onto(&mut arg, allow, ","); | |
904 | cmd.arg(&arg); | |
905 | } | |
906 | } | |
907 | ||
908 | /// Adds [`--error-format`] to the command to execute. | |
909 | /// | |
910 | /// Cargo always uses JSON output. This has several benefits, such as being | |
911 | /// easier to parse, handles changing formats (for replaying cached messages), | |
912 | /// ensures atomic output (so messages aren't interleaved), allows for | |
913 | /// intercepting messages like rmeta artifacts, etc. rustc includes a | |
914 | /// "rendered" field in the JSON message with the message properly formatted, | |
915 | /// which Cargo will extract and display to the user. | |
916 | /// | |
917 | /// [`--error-format`]: https://doc.rust-lang.org/nightly/rustc/command-line-arguments.html#--error-format-control-how-errors-are-produced | |
918 | fn add_error_format_and_color(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder) { | |
919 | cmd.arg("--error-format=json"); | |
920 | let mut json = String::from("--json=diagnostic-rendered-ansi,artifacts,future-incompat"); | |
921 | ||
922 | match cx.bcx.build_config.message_format { | |
923 | MessageFormat::Short | MessageFormat::Json { short: true, .. } => { | |
924 | json.push_str(",diagnostic-short"); | |
925 | } | |
926 | _ => {} | |
927 | } | |
928 | cmd.arg(json); | |
929 | ||
930 | let config = cx.bcx.config; | |
931 | if let Some(width) = config.shell().err_width().diagnostic_terminal_width() { | |
932 | cmd.arg(format!("--diagnostic-width={width}")); | |
933 | } | |
934 | } | |
935 | ||
/// Adds essential rustc flags and environment variables to the command to execute.
///
/// Covers everything derived from the unit's profile and target: crate
/// name/type, `--emit`, codegen options (opt-level, debuginfo, LTO, panic
/// strategy, …), feature cfgs, metadata hashes, output dir, and target
/// selection. Note that the *order* in which arguments are appended is
/// significant: the final command line is hashed for fingerprinting.
fn build_base_args(cx: &Context<'_, '_>, cmd: &mut ProcessBuilder, unit: &Unit) -> CargoResult<()> {
    // Build-script *execution* units never invoke rustc; compiling the build
    // script itself is a normal unit, though.
    assert!(!unit.mode.is_run_custom_build());

    let bcx = cx.bcx;
    // Pull apart the profile once; `clone` keeps `unit.profile` intact.
    let Profile {
        ref opt_level,
        codegen_backend,
        codegen_units,
        debuginfo,
        debug_assertions,
        split_debuginfo,
        overflow_checks,
        rpath,
        ref panic,
        incremental,
        strip,
        rustflags: profile_rustflags,
        trim_paths,
        ..
    } = unit.profile.clone();
    let test = unit.mode.is_any_test();

    cmd.arg("--crate-name").arg(&unit.target.crate_name());

    let edition = unit.target.edition();
    edition.cmd_edition_arg(cmd);

    add_path_args(bcx.ws, unit, cmd);
    add_error_format_and_color(cx, cmd);
    add_allow_features(cx, cmd);

    // Track whether any requested crate type is a dylib; that influences
    // `-C prefer-dynamic` below. Tests get their crate type implicitly from
    // `--test`, so no explicit `--crate-type` is passed for them.
    let mut contains_dy_lib = false;
    if !test {
        for crate_type in &unit.target.rustc_crate_types() {
            cmd.arg("--crate-type").arg(crate_type.as_str());
            contains_dy_lib |= crate_type == &CrateType::Dylib;
        }
    }

    if unit.mode.is_check() {
        cmd.arg("--emit=dep-info,metadata");
    } else if !unit.requires_upstream_objects() {
        // Always produce metadata files for rlib outputs. Metadata may be used
        // in this session for a pipelined compilation, or it may be used in a
        // future Cargo session as part of a pipelined compile.
        cmd.arg("--emit=dep-info,metadata,link");
    } else {
        cmd.arg("--emit=dep-info,link");
    }

    // Host-only helpers (proc macros, plugins) and non-primary dylib users
    // link dynamically where possible.
    let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build())
        || (contains_dy_lib && !cx.is_primary_package(unit));
    if prefer_dynamic {
        cmd.arg("-C").arg("prefer-dynamic");
    }

    // "0" is rustc's default; skip the flag to keep the command line short.
    if opt_level.as_str() != "0" {
        cmd.arg("-C").arg(&format!("opt-level={}", opt_level));
    }

    // "unwind" is rustc's default panic strategy; only pass the non-default.
    if *panic != PanicStrategy::Unwind {
        cmd.arg("-C").arg(format!("panic={}", panic));
    }

    cmd.args(&lto_args(cx, unit));

    if let Some(backend) = codegen_backend {
        cmd.arg("-Z").arg(&format!("codegen-backend={}", backend));
    }

    if let Some(n) = codegen_units {
        cmd.arg("-C").arg(&format!("codegen-units={}", n));
    }

    let debuginfo = debuginfo.into_inner();
    // Shorten the number of arguments if possible.
    if debuginfo != TomlDebugInfo::None {
        cmd.arg("-C").arg(format!("debuginfo={debuginfo}"));
        // This is generally just an optimization on build time so if we don't
        // pass it then it's ok. The values for the flag (off, packed, unpacked)
        // may be supported or not depending on the platform, so availability is
        // checked per-value. For example, at the time of writing this code, on
        // Windows the only stable valid value for split-debuginfo is "packed",
        // while on Linux "unpacked" is also stable.
        if let Some(split) = split_debuginfo {
            if cx
                .bcx
                .target_data
                .info(unit.kind)
                .supports_debuginfo_split(split)
            {
                cmd.arg("-C").arg(format!("split-debuginfo={split}"));
            }
        }
    }

    // Unstable `-Ztrim-paths` support (path sanitization in artifacts).
    if let Some(trim_paths) = trim_paths {
        trim_paths_args(cmd, cx, unit, &trim_paths)?;
    }

    // Flag ordering here determines precedence: manifest lint config first,
    // then profile rustflags, then per-unit extra args (e.g. `cargo rustc`).
    cmd.args(unit.pkg.manifest().lint_rustflags());
    cmd.args(&profile_rustflags);
    if let Some(args) = cx.bcx.extra_args_for(unit) {
        cmd.args(args);
    }

    // `-C overflow-checks` is implied by the setting of `-C debug-assertions`,
    // so we only need to provide `-C overflow-checks` if it differs from
    // the value of `-C debug-assertions` we would provide.
    if opt_level.as_str() != "0" {
        if debug_assertions {
            cmd.args(&["-C", "debug-assertions=on"]);
            if !overflow_checks {
                cmd.args(&["-C", "overflow-checks=off"]);
            }
        } else if overflow_checks {
            cmd.args(&["-C", "overflow-checks=on"]);
        }
    } else if !debug_assertions {
        cmd.args(&["-C", "debug-assertions=off"]);
        if overflow_checks {
            cmd.args(&["-C", "overflow-checks=on"]);
        }
    } else if !overflow_checks {
        cmd.args(&["-C", "overflow-checks=off"]);
    }

    if test && unit.target.harness() {
        cmd.arg("--test");

        // Cargo has historically never compiled `--test` binaries with
        // `panic=abort` because the `test` crate itself didn't support it.
        // Support is now upstream, however, but requires an unstable flag to be
        // passed when compiling the test. We require, in Cargo, an unstable
        // flag to pass to rustc, so register that here. Eventually this flag
        // will simply not be needed when the behavior is stabilized in the Rust
        // compiler itself.
        if *panic == PanicStrategy::Abort {
            cmd.arg("-Z").arg("panic-abort-tests");
        }
    } else if test {
        // Harness-less test targets still need `cfg(test)` enabled.
        cmd.arg("--cfg").arg("test");
    }

    cmd.args(&features_args(unit));
    cmd.args(&check_cfg_args(cx, unit));

    // `-C metadata` disambiguates symbols between different builds of the
    // same crate; `-C extra-filename` does the same for output file names.
    let meta = cx.files().metadata(unit);
    cmd.arg("-C").arg(&format!("metadata={}", meta));
    if cx.files().use_extra_filename(unit) {
        cmd.arg("-C").arg(&format!("extra-filename=-{}", meta));
    }

    if rpath {
        cmd.arg("-C").arg("rpath");
    }

    cmd.arg("--out-dir").arg(&cx.files().out_dir(unit));

    // Helper for optional `key prefix<val>` flags: emits nothing when `val`
    // is absent.
    fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str, val: Option<&OsStr>) {
        if let Some(val) = val {
            let mut joined = OsString::from(prefix);
            joined.push(val);
            cmd.arg(key).arg(joined);
        }
    }

    // Only cross-compilation (`CompileKind::Target`) passes `--target`; host
    // builds rely on rustc's default.
    if let CompileKind::Target(n) = unit.kind {
        cmd.arg("--target").arg(n.rustc_target());
    }

    opt(
        cmd,
        "-C",
        "linker=",
        cx.compilation
            .target_linker(unit.kind)
            .as_ref()
            .map(|s| s.as_ref()),
    );
    if incremental {
        let dir = cx.files().layout(unit.kind).incremental().as_os_str();
        opt(cmd, "-C", "incremental=", Some(dir));
    }

    if strip != Strip::None {
        cmd.arg("-C").arg(format!("strip={}", strip));
    }

    if unit.is_std {
        // -Zforce-unstable-if-unmarked prevents the accidental use of
        // unstable crates within the sysroot (such as "extern crate libc" or
        // any non-public crate in the sysroot).
        //
        // RUSTC_BOOTSTRAP allows unstable features on stable.
        cmd.arg("-Z")
            .arg("force-unstable-if-unmarked")
            .env("RUSTC_BOOTSTRAP", "1");
    }

    // Add `CARGO_BIN_EXE_` environment variables for building tests.
    if unit.target.is_test() || unit.target.is_bench() {
        for bin_target in unit
            .pkg
            .manifest()
            .targets()
            .iter()
            .filter(|target| target.is_bin())
        {
            let exe_path = cx
                .files()
                .bin_link_for_target(bin_target, unit.kind, cx.bcx)?;
            let name = bin_target
                .binary_filename()
                .unwrap_or(bin_target.name().to_string());
            let key = format!("CARGO_BIN_EXE_{}", name);
            cmd.env(&key, exe_path);
        }
    }
    Ok(())
}
1158 | ||
1159 | /// All active features for the unit passed as `--cfg features=<feature-name>`. | |
1160 | fn features_args(unit: &Unit) -> Vec<OsString> { | |
1161 | let mut args = Vec::with_capacity(unit.features.len() * 2); | |
1162 | ||
1163 | for feat in &unit.features { | |
1164 | args.push(OsString::from("--cfg")); | |
1165 | args.push(OsString::from(format!("feature=\"{}\"", feat))); | |
1166 | } | |
1167 | ||
1168 | args | |
1169 | } | |
1170 | ||
ed00b5ec FG |
/// Generates the `--remap-path-scope` and `--remap-path-prefix` for [RFC 3127].
/// See also unstable feature [`-Ztrim-paths`].
///
/// [RFC 3127]: https://rust-lang.github.io/rfcs/3127-trim-paths.html
/// [`-Ztrim-paths`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#profile-trim-paths-option
fn trim_paths_args(
    cmd: &mut ProcessBuilder,
    cx: &Context<'_, '_>,
    unit: &Unit,
    trim_paths: &TomlTrimPaths,
) -> CargoResult<()> {
    // `trim-paths = "none"`: nothing to remap.
    if trim_paths.is_none() {
        return Ok(());
    }

    // feature gate was checked during manifest/config parsing.
    cmd.arg("-Zunstable-options");
    cmd.arg(format!("-Zremap-path-scope={trim_paths}"));

    // Remap the sysroot's vendored `std` sources to the same `/rustc/<hash>`
    // form rustc itself uses, so sysroot paths never leak into artifacts.
    let sysroot_remap = {
        let sysroot = &cx.bcx.target_data.info(unit.kind).sysroot;
        let mut remap = OsString::from("--remap-path-prefix=");
        remap.push(sysroot);
        remap.push("/lib/rustlib/src/rust"); // See also `detect_sysroot_src_path()`.
        remap.push("=");
        remap.push("/rustc/");
        // This remap logic aligns with rustc:
        // <https://github.com/rust-lang/rust/blob/c2ef3516/src/bootstrap/src/lib.rs#L1113-L1116>
        if let Some(commit_hash) = cx.bcx.rustc().commit_hash.as_ref() {
            remap.push(commit_hash);
        } else {
            remap.push(cx.bcx.rustc().version.to_string());
        }
        remap
    };
    cmd.arg(sysroot_remap);

    let package_remap = {
        let pkg_root = unit.pkg.root();
        let ws_root = cx.bcx.ws.root();
        let is_local = unit.pkg.package_id().source_id().is_path();
        let mut remap = OsString::from("--remap-path-prefix=");
        // Remapped to path relative to workspace root:
        //
        // * path dependencies under workspace root directory
        //
        // Remapped to `<pkg>-<version>`
        //
        // * registry dependencies
        // * git dependencies
        // * path dependencies outside workspace root directory
        if is_local && pkg_root.strip_prefix(ws_root).is_ok() {
            remap.push(ws_root);
            remap.push("="); // empty to remap to relative paths.
        } else {
            remap.push(pkg_root);
            remap.push("=");
            remap.push(unit.pkg.name());
            remap.push("-");
            remap.push(unit.pkg.version().to_string());
        }
        remap
    };
    cmd.arg(package_remap);

    Ok(())
}
1238 | ||
0a29b90c FG |
/// Generates the `--check-cfg` arguments for the `unit`.
/// See unstable feature [`check-cfg`].
///
/// Returns an empty vector when the `-Zcheck-cfg` flag is not enabled, so
/// callers can unconditionally `cmd.args(..)` the result.
///
/// [`check-cfg`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#check-cfg
fn check_cfg_args(cx: &Context<'_, '_>, unit: &Unit) -> Vec<OsString> {
    if cx.bcx.config.cli_unstable().check_cfg {
        // This generate something like this:
        // - cfg(feature, values())
        // - cfg(feature, values("foo", "bar"))
        //
        // NOTE: Despite only explicitly specifying `feature`, well known names and values
        // are implicitly enabled when one or more `--check-cfg` argument is passed.

        // Rough upper bound on the final string length, to avoid repeated
        // reallocation while pushing the feature names below.
        let gross_cap_estimation = unit.pkg.summary().features().len() * 7 + 25;
        let mut arg_feature = OsString::with_capacity(gross_cap_estimation);
        arg_feature.push("cfg(feature, values(");
        for (i, feature) in unit.pkg.summary().features().keys().enumerate() {
            if i != 0 {
                arg_feature.push(", ");
            }
            arg_feature.push("\"");
            arg_feature.push(feature);
            arg_feature.push("\"");
        }
        arg_feature.push("))");

        vec![
            OsString::from("-Zunstable-options"),
            OsString::from("--check-cfg"),
            arg_feature,
        ]
    } else {
        Vec::new()
    }
}
1274 | ||
1275 | /// Adds LTO related codegen flags. | |
1276 | fn lto_args(cx: &Context<'_, '_>, unit: &Unit) -> Vec<OsString> { | |
1277 | let mut result = Vec::new(); | |
1278 | let mut push = |arg: &str| { | |
1279 | result.push(OsString::from("-C")); | |
1280 | result.push(OsString::from(arg)); | |
1281 | }; | |
1282 | match cx.lto[unit] { | |
1283 | lto::Lto::Run(None) => push("lto"), | |
1284 | lto::Lto::Run(Some(s)) => push(&format!("lto={}", s)), | |
1285 | lto::Lto::Off => { | |
1286 | push("lto=off"); | |
1287 | push("embed-bitcode=no"); | |
1288 | } | |
1289 | lto::Lto::ObjectAndBitcode => {} // this is rustc's default | |
1290 | lto::Lto::OnlyBitcode => push("linker-plugin-lto"), | |
1291 | lto::Lto::OnlyObject => push("embed-bitcode=no"), | |
1292 | } | |
1293 | result | |
1294 | } | |
1295 | ||
/// Adds dependency-relevant rustc flags and environment variables
/// to the command to execute, such as [`-L`] and [`--extern`].
///
/// [`-L`]: https://doc.rust-lang.org/nightly/rustc/command-line-arguments.html#-l-add-a-directory-to-the-library-search-path
/// [`--extern`]: https://doc.rust-lang.org/nightly/rustc/command-line-arguments.html#--extern-specify-where-an-external-library-is-located
fn build_deps_args(cmd: &mut ProcessBuilder, cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<()> {
    let bcx = cx.bcx;
    // Search path for this unit's own compiled dependencies.
    cmd.arg("-L").arg(&{
        let mut deps = OsString::from("dependency=");
        deps.push(cx.files().deps_dir(unit));
        deps
    });

    // Be sure that the host path is also listed. This'll ensure that proc macro
    // dependencies are correctly found (for reexported macros).
    if !unit.kind.is_host() {
        cmd.arg("-L").arg(&{
            let mut deps = OsString::from("dependency=");
            deps.push(cx.files().host_deps());
            deps
        });
    }

    let deps = cx.unit_deps(unit);

    // If there is not one linkable target but should, rustc fails later
    // on if there is an `extern crate` for it. This may turn into a hard
    // error in the future (see PR #4797).
    if !deps
        .iter()
        .any(|dep| !dep.unit.mode.is_doc() && dep.unit.target.is_linkable())
    {
        if let Some(dep) = deps.iter().find(|dep| {
            !dep.unit.mode.is_doc() && dep.unit.target.is_lib() && !dep.unit.artifact.is_true()
        }) {
            bcx.config.shell().warn(format!(
                "The package `{}` \
                 provides no linkable target. The compiler might raise an error while compiling \
                 `{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \
                 Cargo.toml. This warning might turn into a hard error in the future.",
                dep.unit.target.crate_name(),
                unit.target.crate_name(),
                dep.unit.target.crate_name()
            ))?;
        }
    }

    let mut unstable_opts = false;

    // Expose each build script's output directory to this compilation via
    // `OUT_DIR`.
    for dep in deps {
        if dep.unit.mode.is_run_custom_build() {
            cmd.env("OUT_DIR", &cx.files().build_script_out_dir(&dep.unit));
        }
    }

    // `--extern` flags for every linkable dependency; may flip `unstable_opts`
    // when options like `priv`/`noprelude` are used.
    for arg in extern_args(cx, unit, &mut unstable_opts)? {
        cmd.arg(arg);
    }

    // Environment variables for artifact dependencies.
    for (var, env) in artifact::get_env(cx, deps)? {
        cmd.env(&var, env);
    }

    // This will only be set if we're already using a feature
    // requiring nightly rust
    if unstable_opts {
        cmd.arg("-Z").arg("unstable-options");
    }

    Ok(())
}
1367 | ||
1368 | /// Adds extra rustc flags and environment variables collected from the output | |
1369 | /// of a build-script to the command to execute, include custom environment | |
1370 | /// variables and `cfg`. | |
1371 | fn add_custom_flags( | |
1372 | cmd: &mut ProcessBuilder, | |
1373 | build_script_outputs: &BuildScriptOutputs, | |
1374 | metadata: Option<Metadata>, | |
1375 | ) -> CargoResult<()> { | |
1376 | if let Some(metadata) = metadata { | |
1377 | if let Some(output) = build_script_outputs.get(metadata) { | |
1378 | for cfg in output.cfgs.iter() { | |
1379 | cmd.arg("--cfg").arg(cfg); | |
1380 | } | |
1381 | if !output.check_cfgs.is_empty() { | |
1382 | cmd.arg("-Zunstable-options"); | |
1383 | for check_cfg in &output.check_cfgs { | |
1384 | cmd.arg("--check-cfg").arg(check_cfg); | |
1385 | } | |
1386 | } | |
781aab86 | 1387 | for (name, value) in output.env.iter() { |
0a29b90c FG |
1388 | cmd.env(name, value); |
1389 | } | |
1390 | } | |
1391 | } | |
1392 | ||
1393 | Ok(()) | |
1394 | } | |
1395 | ||
/// Generates a list of `--extern` arguments.
///
/// Sets `unstable_opts` to `true` when any emitted flag uses a modifier
/// (`priv`, `noprelude`) that requires `-Zunstable-options`; the caller is
/// responsible for passing that flag.
pub fn extern_args(
    cx: &Context<'_, '_>,
    unit: &Unit,
    unstable_opts: &mut bool,
) -> CargoResult<Vec<OsString>> {
    let mut result = Vec::new();
    let deps = cx.unit_deps(unit);

    // Closure to add one dependency to `result`.
    let mut link_to =
        |dep: &UnitDep, extern_crate_name: InternedString, noprelude: bool| -> CargoResult<()> {
            // `value` accumulates the `[opts:]name=` prefix shared by every
            // artifact path emitted for this dependency.
            let mut value = OsString::new();
            let mut opts = Vec::new();
            // Mark non-public deps as `priv` when the unstable
            // `public-dependency` feature is active for this package.
            if unit
                .pkg
                .manifest()
                .unstable_features()
                .require(Feature::public_dependency())
                .is_ok()
                && !dep.public
            {
                opts.push("priv");
                *unstable_opts = true;
            }
            if noprelude {
                opts.push("noprelude");
                *unstable_opts = true;
            }
            if !opts.is_empty() {
                value.push(opts.join(","));
                value.push(":");
            }
            value.push(extern_crate_name.as_str());
            value.push("=");

            // Emits one `--extern <prefix><file>` pair for a single artifact.
            let mut pass = |file| {
                let mut value = value.clone();
                value.push(file);
                result.push(OsString::from("--extern"));
                result.push(value);
            };

            let outputs = cx.outputs(&dep.unit)?;

            if cx.only_requires_rmeta(unit, &dep.unit) || dep.unit.mode.is_check() {
                // Example: rlib dependency for an rlib, rmeta is all that is required.
                let output = outputs
                    .iter()
                    .find(|output| output.flavor == FileFlavor::Rmeta)
                    .expect("failed to find rmeta dep for pipelined dep");
                pass(&output.path);
            } else {
                // Example: a bin needs `rlib` for dependencies, it cannot use rmeta.
                for output in outputs.iter() {
                    if output.flavor == FileFlavor::Linkable {
                        pass(&output.path);
                    }
                }
            }
            Ok(())
        };

    for dep in deps {
        if dep.unit.target.is_linkable() && !dep.unit.mode.is_doc() {
            link_to(dep, dep.extern_crate_name, dep.noprelude)?;
        }
    }
    if unit.target.proc_macro() {
        // Automatically import `proc_macro`.
        result.push(OsString::from("--extern"));
        result.push(OsString::from("proc_macro"));
    }

    Ok(result)
}
1472 | ||
/// Converts a name to the conventional environment-variable form:
/// uppercased, with `-` replaced by `_` (e.g. `my-bin` -> `MY_BIN`).
fn envify(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    for ch in s.chars() {
        // `to_uppercase` may expand one char into several (e.g. 'ß' -> "SS").
        for up in ch.to_uppercase() {
            out.push(if up == '-' { '_' } else { up });
        }
    }
    out
}
1479 | ||
/// Configuration of the display of messages emitted by the compiler,
/// e.g. diagnostics, warnings, errors, and message caching.
struct OutputOptions {
    /// What format we're emitting from Cargo itself.
    format: MessageFormat,
    /// Where to write the JSON messages to support playback later if the unit
    /// is fresh. The file is created lazily so that in the normal case, lots
    /// of empty files are not created. If this is None, the output will not
    /// be cached (such as when replaying cached messages).
    cache_cell: Option<(PathBuf, LazyCell<File>)>,
    /// If `true`, display any diagnostics.
    /// Other types of JSON messages are processed regardless
    /// of the value of this flag.
    ///
    /// This is used primarily for cache replay. If you build with `-vv`, the
    /// cache will be filled with diagnostics from dependencies. When the
    /// cache is replayed without `-vv`, we don't want to show them.
    show_diagnostics: bool,
    /// Tracks the number of warnings we've seen so far.
    warnings_seen: usize,
    /// Tracks the number of errors we've seen so far.
    errors_seen: usize,
}
1503 | ||
1504 | impl OutputOptions { | |
1505 | fn new(cx: &Context<'_, '_>, unit: &Unit) -> OutputOptions { | |
0a29b90c FG |
1506 | let path = cx.files().message_cache_path(unit); |
1507 | // Remove old cache, ignore ENOENT, which is the common case. | |
1508 | drop(fs::remove_file(&path)); | |
1509 | let cache_cell = Some((path, LazyCell::new())); | |
1510 | OutputOptions { | |
1511 | format: cx.bcx.build_config.message_format, | |
0a29b90c FG |
1512 | cache_cell, |
1513 | show_diagnostics: true, | |
1514 | warnings_seen: 0, | |
1515 | errors_seen: 0, | |
1516 | } | |
1517 | } | |
1518 | } | |
1519 | ||
1520 | fn on_stdout_line( | |
1521 | state: &JobState<'_, '_>, | |
1522 | line: &str, | |
1523 | _package_id: PackageId, | |
1524 | _target: &Target, | |
1525 | ) -> CargoResult<()> { | |
1526 | state.stdout(line.to_string())?; | |
1527 | Ok(()) | |
1528 | } | |
1529 | ||
/// Handles one line of compiler stderr output, writing it to the message
/// cache for later replay when the inner handler says it should be kept.
fn on_stderr_line(
    state: &JobState<'_, '_>,
    line: &str,
    package_id: PackageId,
    manifest_path: &std::path::Path,
    target: &Target,
    options: &mut OutputOptions,
) -> CargoResult<()> {
    // `on_stderr_line_inner` returns `true` when the line should be cached.
    if on_stderr_line_inner(state, line, package_id, manifest_path, target, options)? {
        // Check if caching is enabled.
        if let Some((path, cell)) = &mut options.cache_cell {
            // Cache the output, which will be replayed later when Fresh.
            // The cache file is created lazily on the first cached line.
            let f = cell.try_borrow_mut_with(|| paths::create(path))?;
            debug_assert!(!line.contains('\n'));
            f.write_all(line.as_bytes())?;
            f.write_all(&[b'\n'])?;
        }
    }
    Ok(())
}
1550 | ||
/// Handles one line of compiler stderr, routing it to Cargo's human or JSON
/// output as configured.
///
/// Returns `Ok(true)` if the line should be cached for later replay, and
/// `Ok(false)` for lines (like artifact notifications) that are internal
/// signals and must not be cached.
///
/// The sequence of `serde_json::from_str` probes below is order-sensitive:
/// non-JSON passthrough, future-incompat reports, format-specific diagnostic
/// handling, artifact notifications, and finally the generic wrapped-JSON
/// fallback. Do not reorder without care.
fn on_stderr_line_inner(
    state: &JobState<'_, '_>,
    line: &str,
    package_id: PackageId,
    manifest_path: &std::path::Path,
    target: &Target,
    options: &mut OutputOptions,
) -> CargoResult<bool> {
    // We primarily want to use this function to process JSON messages from
    // rustc. The compiler should always print one JSON message per line, and
    // otherwise it may have other output intermingled (think RUST_LOG or
    // something like that), so skip over everything that doesn't look like a
    // JSON message.
    if !line.starts_with('{') {
        state.stderr(line.to_string())?;
        return Ok(true);
    }

    // Keep the message as a raw JSON value so unrecognized fields survive a
    // round trip untouched.
    let mut compiler_message: Box<serde_json::value::RawValue> = match serde_json::from_str(line) {
        Ok(msg) => msg,

        // If the compiler produced a line that started with `{` but it wasn't
        // valid JSON, maybe it wasn't JSON in the first place! Forward it along
        // to stderr.
        Err(e) => {
            debug!("failed to parse json: {:?}", e);
            state.stderr(line.to_string())?;
            return Ok(true);
        }
    };

    // Shared tally of warning/error diagnostics; other levels are ignored.
    let count_diagnostic = |level, options: &mut OutputOptions| {
        if level == "warning" {
            options.warnings_seen += 1;
        } else if level == "error" {
            options.errors_seen += 1;
        }
    };

    // Future-incompatibility reports are consumed here and never forwarded as
    // ordinary diagnostics.
    if let Ok(report) = serde_json::from_str::<FutureIncompatReport>(compiler_message.get()) {
        for item in &report.future_incompat_report {
            count_diagnostic(&*item.diagnostic.level, options);
        }
        state.future_incompat_report(report.future_incompat_report);
        return Ok(true);
    }

    // Depending on what we're emitting from Cargo itself, we figure out what to
    // do with this JSON message.
    match options.format {
        // In the "human" output formats (human/short) or if diagnostic messages
        // from rustc aren't being included in the output of Cargo's JSON
        // messages then we extract the diagnostic (if present) here and handle
        // it ourselves.
        MessageFormat::Human
        | MessageFormat::Short
        | MessageFormat::Json {
            render_diagnostics: true,
            ..
        } => {
            #[derive(serde::Deserialize)]
            struct CompilerMessage {
                rendered: String,
                message: String,
                level: String,
                children: Vec<PartialDiagnostic>,
            }

            // A partial rustfix::diagnostics::Diagnostic. We deserialize only a
            // subset of the fields because rustc's output can be extremely
            // deeply nested JSON in pathological cases involving macro
            // expansion. Rustfix's Diagnostic struct is recursive containing a
            // field `children: Vec<Self>`, and it can cause deserialization to
            // hit serde_json's default recursion limit, or overflow the stack
            // if we turn that off. Cargo only cares about the 1 field listed
            // here.
            #[derive(serde::Deserialize)]
            struct PartialDiagnostic {
                spans: Vec<PartialDiagnosticSpan>,
            }

            // A partial rustfix::diagnostics::DiagnosticSpan.
            #[derive(serde::Deserialize)]
            struct PartialDiagnosticSpan {
                suggestion_applicability: Option<Applicability>,
            }

            if let Ok(mut msg) = serde_json::from_str::<CompilerMessage>(compiler_message.get()) {
                if msg.message.starts_with("aborting due to")
                    || msg.message.ends_with("warning emitted")
                    || msg.message.ends_with("warnings emitted")
                {
                    // Skip this line; we'll print our own summary at the end.
                    return Ok(true);
                }
                // state.stderr will add a newline
                if msg.rendered.ends_with('\n') {
                    msg.rendered.pop();
                }
                let rendered = msg.rendered;
                if options.show_diagnostics {
                    // A diagnostic is machine-applicable if ANY child span
                    // carries a MachineApplicable suggestion.
                    let machine_applicable: bool = msg
                        .children
                        .iter()
                        .map(|child| {
                            child
                                .spans
                                .iter()
                                .filter_map(|span| span.suggestion_applicability)
                                .any(|app| app == Applicability::MachineApplicable)
                        })
                        .any(|b| b);
                    count_diagnostic(&msg.level, options);
                    state.emit_diag(msg.level, rendered, machine_applicable)?;
                }
                return Ok(true);
            }
        }

        // Remove color information from the rendered string if color is not
        // enabled. Cargo always asks for ANSI colors from rustc. This allows
        // cached replay to enable/disable colors without re-invoking rustc.
        MessageFormat::Json { ansi: false, .. } => {
            #[derive(serde::Deserialize, serde::Serialize)]
            struct CompilerMessage {
                rendered: String,
                // Preserve all fields other than `rendered` untouched.
                #[serde(flatten)]
                other: std::collections::BTreeMap<String, serde_json::Value>,
            }
            if let Ok(mut error) = serde_json::from_str::<CompilerMessage>(compiler_message.get()) {
                error.rendered = anstream::adapter::strip_str(&error.rendered).to_string();
                let new_line = serde_json::to_string(&error)?;
                let new_msg: Box<serde_json::value::RawValue> = serde_json::from_str(&new_line)?;
                compiler_message = new_msg;
            }
        }

        // If ansi colors are desired then we should be good to go! We can just
        // pass through this message as-is.
        MessageFormat::Json { ansi: true, .. } => {}
    }

    // We always tell rustc to emit messages about artifacts being produced.
    // These messages feed into pipelined compilation, as well as timing
    // information.
    //
    // Look for a matching directive and inform Cargo internally that a
    // metadata file has been produced.
    #[derive(serde::Deserialize)]
    struct ArtifactNotification {
        artifact: String,
    }

    if let Ok(artifact) = serde_json::from_str::<ArtifactNotification>(compiler_message.get()) {
        trace!("found directive from rustc: `{}`", artifact.artifact);
        if artifact.artifact.ends_with(".rmeta") {
            debug!("looks like metadata finished early!");
            state.rmeta_produced();
        }
        // Artifact notifications are internal signals; never cache them.
        return Ok(false);
    }

    // And failing all that above we should have a legitimate JSON diagnostic
    // from the compiler, so wrap it in an external Cargo JSON message
    // indicating which package it came from and then emit it.

    if !options.show_diagnostics {
        return Ok(true);
    }

    #[derive(serde::Deserialize)]
    struct CompilerMessage {
        level: String,
    }
    if let Ok(message) = serde_json::from_str::<CompilerMessage>(compiler_message.get()) {
        count_diagnostic(&message.level, options);
    }

    let msg = machine_message::FromCompiler {
        package_id,
        manifest_path,
        target,
        message: compiler_message,
    }
    .to_json_string();

    // Switch json lines from rustc/rustdoc that appear on stderr to stdout
    // instead. We want the stdout of Cargo to always be machine parseable as
    // stderr has our colorized human-readable messages.
    state.stdout(msg)?;
    Ok(true)
}
1744 | ||
1745 | /// Creates a unit of work that replays the cached compiler message. | |
1746 | /// | |
1747 | /// Usually used when a job is fresh and doesn't need to recompile. | |
1748 | fn replay_output_cache( | |
1749 | package_id: PackageId, | |
1750 | manifest_path: PathBuf, | |
1751 | target: &Target, | |
1752 | path: PathBuf, | |
1753 | format: MessageFormat, | |
0a29b90c FG |
1754 | show_diagnostics: bool, |
1755 | ) -> Work { | |
1756 | let target = target.clone(); | |
1757 | let mut options = OutputOptions { | |
1758 | format, | |
0a29b90c FG |
1759 | cache_cell: None, |
1760 | show_diagnostics, | |
1761 | warnings_seen: 0, | |
1762 | errors_seen: 0, | |
1763 | }; | |
1764 | Work::new(move |state| { | |
1765 | if !path.exists() { | |
1766 | // No cached output, probably didn't emit anything. | |
1767 | return Ok(()); | |
1768 | } | |
1769 | // We sometimes have gigabytes of output from the compiler, so avoid | |
1770 | // loading it all into memory at once, as that can cause OOM where | |
1771 | // otherwise there would be none. | |
1772 | let file = paths::open(&path)?; | |
1773 | let mut reader = std::io::BufReader::new(file); | |
1774 | let mut line = String::new(); | |
1775 | loop { | |
1776 | let length = reader.read_line(&mut line)?; | |
1777 | if length == 0 { | |
1778 | break; | |
1779 | } | |
1780 | let trimmed = line.trim_end_matches(&['\n', '\r'][..]); | |
1781 | on_stderr_line( | |
1782 | state, | |
1783 | trimmed, | |
1784 | package_id, | |
1785 | &manifest_path, | |
1786 | &target, | |
1787 | &mut options, | |
1788 | )?; | |
1789 | line.clear(); | |
1790 | } | |
1791 | Ok(()) | |
1792 | }) | |
1793 | } | |
1794 | ||
1795 | /// Provides a package name with descriptive target information, | |
1796 | /// e.g., '`foo` (bin "bar" test)', '`foo` (lib doctest)'. | |
1797 | fn descriptive_pkg_name(name: &str, target: &Target, mode: &CompileMode) -> String { | |
1798 | let desc_name = target.description_named(); | |
1799 | let mode = if mode.is_rustc_test() && !(target.is_test() || target.is_bench()) { | |
1800 | " test" | |
1801 | } else if mode.is_doc_test() { | |
1802 | " doctest" | |
1803 | } else if mode.is_doc() { | |
1804 | " doc" | |
1805 | } else { | |
1806 | "" | |
1807 | }; | |
1808 | format!("`{name}` ({desc_name}{mode})") | |
1809 | } | |
49aad941 FG |
1810 | |
1811 | /// Applies environment variables from config `[env]` to [`ProcessBuilder`]. | |
1812 | fn apply_env_config(config: &crate::Config, cmd: &mut ProcessBuilder) -> CargoResult<()> { | |
1813 | for (key, value) in config.env_config()?.iter() { | |
1814 | // never override a value that has already been set by cargo | |
1815 | if cmd.get_envs().contains_key(key) { | |
1816 | continue; | |
1817 | } | |
1818 | ||
1819 | if value.is_force() || config.get_env_os(key).is_none() { | |
1820 | cmd.env(key, value.resolve(config)); | |
1821 | } | |
1822 | } | |
1823 | Ok(()) | |
1824 | } | |
fe692bf9 FG |
1825 | |
1826 | /// Checks if there are some scrape units waiting to be processed. | |
1827 | fn should_include_scrape_units(bcx: &BuildContext<'_, '_>, unit: &Unit) -> bool { | |
1828 | unit.mode.is_doc() && bcx.scrape_units.len() > 0 && bcx.ws.unit_needs_doc_scrape(unit) | |
1829 | } | |
1830 | ||
1831 | /// Gets the file path of function call information output from `rustdoc`. | |
1832 | fn scrape_output_path(cx: &Context<'_, '_>, unit: &Unit) -> CargoResult<PathBuf> { | |
1833 | assert!(unit.mode.is_doc() || unit.mode.is_doc_scrape()); | |
1834 | cx.outputs(unit).map(|outputs| outputs[0].path.clone()) | |
1835 | } |