1 //! Implementation of rustbuild, the Rust build system.
2 //!
3 //! This module, and its descendants, are the implementation of the Rust build
4 //! system. Most of this build system is backed by Cargo, but the outer layer
5 //! here is responsible for orchestrating calls to Cargo, sequencing Cargo
6 //! builds, building artifacts like LLVM, etc. The goals of rustbuild are:
7 //!
8 //! * To be an easily understandable, easily extensible, and maintainable build
9 //! system.
10 //! * To leverage standard tools in the Rust ecosystem, namely crates.io and
11 //!   Cargo, to build the compiler.
12 //! * To provide a standard interface for building across all platforms, including MSVC.
13 //!
14 //! ## Architecture
15 //!
16 //! The build system defers most of the complicated logic of managing invocations
17 //! of rustc and rustdoc to Cargo itself. However, moving through the various stages
18 //! and copying artifacts between them is still rustbuild's job. Each time rustbuild
19 //! is invoked, it iterates through the list of predefined steps and executes
20 //! each serially in turn if it matches the paths passed on the command line or is a default rule.
21 //! For each step rustbuild relies on the step internally being incremental and
22 //! parallel. Note, though, that the `-j` parameter to rustbuild gets forwarded
23 //! to appropriate test harnesses and such.
24 //!
25 //! Most of the "meaty" steps that matter are backed by Cargo, which does indeed
26 //! have its own parallelism and incremental management. Later steps, like
27 //! tests, aren't incremental and simply run the entire suite currently.
28 //! However, compiletest itself tries to avoid running tests when the artifacts
29 //! that are involved (mainly the compiler) haven't changed.
30 //!
31 //! When you execute `x.py build`, the steps executed are:
32 //!
33 //! * First, the python script is run. This will automatically download the
34 //! stage0 rustc and cargo according to `src/stage0.json`, or use the cached
35 //! versions if they're available. These are then used to compile rustbuild
36 //! itself (using Cargo). Finally, control is transferred to rustbuild.
37 //!
38 //! * Rustbuild takes over, performs sanity checks, probes the environment,
39 //! reads configuration, and starts executing steps, either for the paths passed
40 //! as command line arguments or by going through the default rules.
41 //!
42 //! The build output will be something like the following:
43 //!
44 //! Building stage0 std artifacts
45 //! Copying stage0 std
46 //! Building stage0 test artifacts
47 //! Copying stage0 test
48 //! Building stage0 compiler artifacts
49 //! Copying stage0 rustc
50 //! Assembling stage1 compiler
51 //! Building stage1 std artifacts
52 //! Copying stage1 std
53 //! Building stage1 test artifacts
54 //! Copying stage1 test
55 //! Building stage1 compiler artifacts
56 //! Copying stage1 rustc
57 //! Assembling stage2 compiler
58 //! Uplifting stage1 std
59 //! Uplifting stage1 test
60 //! Uplifting stage1 rustc
61 //!
62 //! Let's dissect that a little:
63 //!
64 //! ## Building stage0 {std,test,compiler} artifacts
65 //!
66 //! These steps use the provided (downloaded, usually) compiler to compile the
67 //! local Rust source into libraries we can use.
68 //!
69 //! ## Copying stage0 {std,test,rustc}
70 //!
71 //! This copies the build output from Cargo into
72 //! `build/$HOST/stage0-sysroot/lib/rustlib/$ARCH/lib`. FIXME: this step's
73 //! documentation should be expanded -- the information already here may be
74 //! incorrect.
75 //!
76 //! ## Assembling stage1 compiler
77 //!
78 //! This copies the libraries we built in "building stage0 ... artifacts" into
79 //! the stage1 compiler's lib directory. These are the host libraries that the
80 //! compiler itself uses to run. These aren't actually used by artifacts the new
81 //! compiler generates. This step also copies the rustc and rustdoc binaries we
82 //! generated into build/$HOST/stage1/bin.
83 //!
84 //! The stage1/bin/rustc is a fully functional compiler, but it doesn't yet have
85 //! any libraries to link built binaries or libraries to. The next 3 steps will
86 //! provide those libraries for it; they are mostly equivalent to constructing
87 //! the stage1/bin compiler so we don't go through them individually.
88 //!
89 //! ## Uplifting stage1 {std,test,rustc}
90 //!
91 //! This step copies the libraries from the stage1 compiler sysroot into the
92 //! stage2 compiler. This is done to avoid rebuilding the compiler; libraries
93 //! we'd build in this step should be identical (in function, if not necessarily
94 //! identical on disk) so there's no need to recompile the compiler. Note
95 //! that if you want to, you can enable the full-bootstrap option to change this
96 //! behavior.
97 //!
98 //! Each step is driven by a separate Cargo project and rustbuild orchestrates
99 //! copying files between steps and otherwise preparing for Cargo to run.
100 //!
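//! For reference, a few other common invocations follow the same overall flow
//! (these are illustrative; see `src/bootstrap/README.md` and `./x.py --help`
//! for the full set of commands and flags):
//!
//!     ./x.py build --stage 1 library/std
//!     ./x.py check
//!     ./x.py test src/test/ui
//!     ./x.py doc
//!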
101 //! ## Further information
102 //!
103 //! More documentation can be found in each respective module below, and you can
104 //! also check out the `src/bootstrap/README.md` file for more information.
105
106 use std::cell::{Cell, RefCell};
107 use std::collections::{HashMap, HashSet};
108 use std::env;
109 use std::fs::{self, File};
110 use std::path::{Path, PathBuf};
111 use std::process::{self, Command};
112 use std::str;
113
114 #[cfg(unix)]
115 use std::os::unix::fs::symlink as symlink_file;
116 #[cfg(windows)]
117 use std::os::windows::fs::symlink_file;
118
119 use filetime::FileTime;
120 use once_cell::sync::OnceCell;
121
122 use crate::builder::Kind;
123 use crate::config::{LlvmLibunwind, TargetSelection};
124 use crate::util::{
125 check_run, exe, libdir, mtime, output, run, run_suppressed, t, try_run, try_run_suppressed,
126 CiEnv,
127 };
128
129 mod builder;
130 mod cache;
131 mod cc_detect;
132 mod channel;
133 mod check;
134 mod clean;
135 mod compile;
136 mod config;
137 mod dist;
138 mod doc;
139 mod flags;
140 mod format;
141 mod install;
142 mod metadata;
143 mod native;
144 mod run;
145 mod sanity;
146 mod setup;
147 mod tarball;
148 mod test;
149 mod tool;
150 mod toolstate;
151 pub mod util;
152
153 #[cfg(feature = "build-metrics")]
154 mod metrics;
155
156 #[cfg(windows)]
157 mod job;
158
159 #[cfg(all(unix, not(target_os = "haiku")))]
160 mod job {
161 pub unsafe fn setup(build: &mut crate::Build) {
162 if build.config.low_priority {
163 libc::setpriority(libc::PRIO_PGRP as _, 0, 10);
164 }
165 }
166 }
167
168 #[cfg(any(target_os = "haiku", target_os = "hermit", not(any(unix, windows))))]
169 mod job {
170 pub unsafe fn setup(_build: &mut crate::Build) {}
171 }
172
173 pub use crate::builder::PathSet;
174 use crate::cache::{Interned, INTERNER};
175 pub use crate::config::Config;
176 pub use crate::flags::Subcommand;
177
178 const LLVM_TOOLS: &[&str] = &[
179 "llvm-cov", // used to generate coverage report
180 "llvm-nm", // used to inspect binaries; it shows symbol names, their sizes and visibility
181 "llvm-objcopy", // used to transform ELFs into binary format which flashing tools consume
182 "llvm-objdump", // used to disassemble programs
183 "llvm-profdata", // used to inspect and merge files generated by profiles
184 "llvm-readobj", // used to get information from ELFs/objects that the other tools don't provide
185 "llvm-size", // used to prints the size of the linker sections of a program
186 "llvm-strip", // used to discard symbols from binary files to reduce their size
187 "llvm-ar", // used for creating and modifying archive files
188 "llvm-as", // used to convert LLVM assembly to LLVM bitcode
189 "llvm-dis", // used to disassemble LLVM bitcode
190 "llc", // used to compile LLVM bytecode
191 "opt", // used to optimize LLVM bytecode
192 ];
193
194 pub const VERSION: usize = 2;
195
196 /// Extra --check-cfg to add when building
197 /// (Mode restriction, config name, config values (if any))
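/// For example (an illustrative reading of one entry below): `(Some(Mode::Std), "no_fp_fmt_parse", None)`
/// means that when building the standard library, `no_fp_fmt_parse` is registered as an
/// expected `cfg` name (with no restricted set of values), so code gated with
/// `#[cfg(no_fp_fmt_parse)]` does not trip the unexpected-cfg check.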
198 const EXTRA_CHECK_CFGS: &[(Option<Mode>, &'static str, Option<&[&'static str]>)] = &[
199 (None, "bootstrap", None),
200 (Some(Mode::Rustc), "parallel_compiler", None),
201 (Some(Mode::ToolRustc), "parallel_compiler", None),
202 (Some(Mode::Std), "stdarch_intel_sde", None),
203 (Some(Mode::Std), "no_fp_fmt_parse", None),
204 (Some(Mode::Std), "no_global_oom_handling", None),
205 (Some(Mode::Std), "freebsd12", None),
206 (Some(Mode::Std), "backtrace_in_libstd", None),
207 /* Extra values not defined in the built-in targets yet, but used in std */
208 (Some(Mode::Std), "target_env", Some(&["libnx"])),
209 (Some(Mode::Std), "target_os", Some(&["watchos"])),
210 (
211 Some(Mode::Std),
212 "target_arch",
213 Some(&["asmjs", "spirv", "nvptx", "nvptx64", "le32", "xtensa"]),
214 ),
215 /* Extra names used by dependencies */
216     // FIXME: Used by rustfmt in their tests but is invalid (neither cargo nor bootstrap ever sets
217     // this config); it should probably be removed or use an allow attribute instead.
218 (Some(Mode::ToolRustc), "release", None),
219     // FIXME: Used by stdarch in their tests; should use an allow attribute instead.
220 (Some(Mode::Std), "dont_compile_me", None),
221 // FIXME: Used by serde_json, but we should not be triggering on external dependencies.
222 (Some(Mode::Rustc), "no_btreemap_remove_entry", None),
223 (Some(Mode::ToolRustc), "no_btreemap_remove_entry", None),
224 // FIXME: Used by crossbeam-utils, but we should not be triggering on external dependencies.
225 (Some(Mode::Rustc), "crossbeam_loom", None),
226 (Some(Mode::ToolRustc), "crossbeam_loom", None),
227 // FIXME: Used by proc-macro2, but we should not be triggering on external dependencies.
228 (Some(Mode::Rustc), "span_locations", None),
229 (Some(Mode::ToolRustc), "span_locations", None),
230 ];
231
232 /// A structure representing a Rust compiler.
233 ///
234 /// Each compiler has a `stage` that it is associated with and a `host` that
235 /// corresponds to the platform the compiler runs on. This structure is used as
236 /// a parameter to many methods below.
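/// For example, `Compiler { stage: 1, host }` identifies the stage1 compiler running on
/// `host`. Steps normally obtain one through the builder (e.g. `Builder::compiler(1, host)`
/// in `builder.rs`) rather than constructing it by hand; this example is illustrative only.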
237 #[derive(Eq, PartialOrd, Ord, PartialEq, Clone, Copy, Hash, Debug)]
238 pub struct Compiler {
239 stage: u32,
240 host: TargetSelection,
241 }
242
243 #[derive(PartialEq, Eq, Copy, Clone, Debug)]
244 pub enum DocTests {
245 /// Run normal tests and doc tests (default).
246 Yes,
247 /// Do not run any doc tests.
248 No,
249 /// Only run doc tests.
250 Only,
251 }
252
253 pub enum GitRepo {
254 Rustc,
255 Llvm,
256 }
257
258 /// Global configuration for the build system.
259 ///
260 /// This structure transitively contains all configuration for the build system.
261 /// All filesystem-encoded configuration is in `config`, all flags are in
262 /// `flags`, and then parsed or probed information is listed in the keys below.
263 ///
264 /// This structure is a parameter of almost all methods in the build system,
265 /// although most functions are implemented as free functions rather than
266 /// methods specifically on this structure itself (to make it easier to
267 /// organize).
268 pub struct Build {
269 /// User-specified configuration from `config.toml`.
270 config: Config,
271
272 // Version information
273 version: String,
274
275 // Properties derived from the above configuration
276 src: PathBuf,
277 out: PathBuf,
278 bootstrap_out: PathBuf,
279 rust_info: channel::GitInfo,
280 cargo_info: channel::GitInfo,
281 rls_info: channel::GitInfo,
282 rust_analyzer_info: channel::GitInfo,
283 clippy_info: channel::GitInfo,
284 miri_info: channel::GitInfo,
285 rustfmt_info: channel::GitInfo,
286 in_tree_llvm_info: channel::GitInfo,
287 local_rebuild: bool,
288 fail_fast: bool,
289 doc_tests: DocTests,
290 verbosity: usize,
291
292 // Targets for which to build
293 build: TargetSelection,
294 hosts: Vec<TargetSelection>,
295 targets: Vec<TargetSelection>,
296
297 initial_rustc: PathBuf,
298 initial_cargo: PathBuf,
299 initial_lld: PathBuf,
300 initial_libdir: PathBuf,
301
302 // Runtime state filled in later on
303 // C/C++ compilers and archiver for all targets
304 cc: HashMap<TargetSelection, cc::Tool>,
305 cxx: HashMap<TargetSelection, cc::Tool>,
306 ar: HashMap<TargetSelection, PathBuf>,
307 ranlib: HashMap<TargetSelection, PathBuf>,
308 // Miscellaneous
309 // allow bidirectional lookups: both name -> path and path -> name
310 crates: HashMap<Interned<String>, Crate>,
311 crate_paths: HashMap<PathBuf, Interned<String>>,
312 is_sudo: bool,
313 ci_env: CiEnv,
314 delayed_failures: RefCell<Vec<String>>,
315 prerelease_version: Cell<Option<u32>>,
316 tool_artifacts:
317 RefCell<HashMap<TargetSelection, HashMap<String, (&'static str, PathBuf, Vec<String>)>>>,
318
319 #[cfg(feature = "build-metrics")]
320 metrics: metrics::BuildMetrics,
321 }
322
323 #[derive(Debug)]
324 struct Crate {
325 name: Interned<String>,
326 deps: HashSet<Interned<String>>,
327 path: PathBuf,
328 }
329
330 impl Crate {
331 fn local_path(&self, build: &Build) -> PathBuf {
332 self.path.strip_prefix(&build.config.src).unwrap().into()
333 }
334 }
335
336 /// When building Rust, various objects are handled differently.
337 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
338 pub enum DependencyType {
339 /// Libraries originating from proc-macros.
340 Host,
341 /// Typical Rust libraries.
342 Target,
343     /// Non-Rust libraries and objects shipped to ease usage of certain targets.
344 TargetSelfContained,
345 }
346
347 /// The various "modes" of invoking Cargo.
348 ///
349 /// These entries currently correspond to the various output directories of the
350 /// build system, with each mode generating output in a different directory.
351 #[derive(Debug, Hash, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
352 pub enum Mode {
353 /// Build the standard library, placing output in the "stageN-std" directory.
354 Std,
355
356 /// Build librustc, and compiler libraries, placing output in the "stageN-rustc" directory.
357 Rustc,
358
359 /// Build a codegen backend for rustc, placing the output in the "stageN-codegen" directory.
360 Codegen,
361
362 /// Build a tool, placing output in the "stage0-bootstrap-tools"
363 /// directory. This is for miscellaneous sets of tools that are built
364 /// using the bootstrap stage0 compiler in its entirety (target libraries
365 /// and all). Typically these tools compile with stable Rust.
366 ToolBootstrap,
367
368 /// Build a tool which uses the locally built std, placing output in the
369 /// "stageN-tools" directory. Its usage is quite rare, mainly used by
370 /// compiletest which needs libtest.
371 ToolStd,
372
373 /// Build a tool which uses the locally built rustc and the target std,
374 /// placing the output in the "stageN-tools" directory. This is used for
375 /// anything that needs a fully functional rustc, such as rustdoc, clippy,
376 /// cargo, rls, rustfmt, miri, etc.
377 ToolRustc,
378 }
379
380 impl Mode {
381 pub fn is_tool(&self) -> bool {
382 matches!(self, Mode::ToolBootstrap | Mode::ToolRustc | Mode::ToolStd)
383 }
384
385 pub fn must_support_dlopen(&self) -> bool {
386 matches!(self, Mode::Std | Mode::Codegen)
387 }
388 }
389
390 pub enum CLang {
391 C,
392 Cxx,
393 }
394
395 impl Build {
396     /// Creates a new build configuration from the `flags` on the command
397 /// line and the filesystem `config`.
398 ///
399 /// By default all build output will be placed in the current directory.
400 pub fn new(config: Config) -> Build {
401 let src = config.src.clone();
402 let out = config.out.clone();
403
404 let is_sudo = match env::var_os("SUDO_USER") {
405 Some(sudo_user) => match env::var_os("USER") {
406 Some(user) => user != sudo_user,
407 None => false,
408 },
409 None => false,
410 };
411
412 let ignore_git = config.ignore_git;
413 let rust_info = channel::GitInfo::new(ignore_git, &src);
414 let cargo_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/cargo"));
415 let rls_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/rls"));
416 let rust_analyzer_info =
417 channel::GitInfo::new(ignore_git, &src.join("src/tools/rust-analyzer"));
418 let clippy_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/clippy"));
419 let miri_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/miri"));
420 let rustfmt_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/rustfmt"));
421
422 // we always try to use git for LLVM builds
423 let in_tree_llvm_info = channel::GitInfo::new(false, &src.join("src/llvm-project"));
424
425 let initial_target_libdir_str = if config.dry_run {
426 "/dummy/lib/path/to/lib/".to_string()
427 } else {
428 output(
429 Command::new(&config.initial_rustc)
430 .arg("--target")
431 .arg(config.build.rustc_target_arg())
432 .arg("--print")
433 .arg("target-libdir"),
434 )
435 };
436 let initial_target_dir = Path::new(&initial_target_libdir_str).parent().unwrap();
437 let initial_lld = initial_target_dir.join("bin").join("rust-lld");
438
439 let initial_sysroot = if config.dry_run {
440 "/dummy".to_string()
441 } else {
442 output(Command::new(&config.initial_rustc).arg("--print").arg("sysroot"))
443 };
444 let initial_libdir = initial_target_dir
445 .parent()
446 .unwrap()
447 .parent()
448 .unwrap()
449 .strip_prefix(initial_sysroot.trim())
450 .unwrap()
451 .to_path_buf();
452
453 let version = std::fs::read_to_string(src.join("src").join("version"))
454 .expect("failed to read src/version");
455 let version = version.trim();
456
457 let bootstrap_out = if std::env::var("BOOTSTRAP_PYTHON").is_ok() {
458 out.join("bootstrap").join("debug")
459 } else {
460 let workspace_target_dir = std::env::var("CARGO_TARGET_DIR")
461 .map(PathBuf::from)
462 .unwrap_or_else(|_| src.join("target"));
463 let bootstrap_out = workspace_target_dir.join("debug");
464 if !bootstrap_out.join("rustc").exists() && !cfg!(test) {
465 // this restriction can be lifted whenever https://github.com/rust-lang/rfcs/pull/3028 is implemented
466 panic!("run `cargo build --bins` before `cargo run`")
467 }
468 bootstrap_out
469 };
470
471 let mut build = Build {
472 initial_rustc: config.initial_rustc.clone(),
473 initial_cargo: config.initial_cargo.clone(),
474 initial_lld,
475 initial_libdir,
476 local_rebuild: config.local_rebuild,
477 fail_fast: config.cmd.fail_fast(),
478 doc_tests: config.cmd.doc_tests(),
479 verbosity: config.verbose,
480
481 build: config.build,
482 hosts: config.hosts.clone(),
483 targets: config.targets.clone(),
484
485 config,
486 version: version.to_string(),
487 src,
488 out,
489 bootstrap_out,
490
491 rust_info,
492 cargo_info,
493 rls_info,
494 rust_analyzer_info,
495 clippy_info,
496 miri_info,
497 rustfmt_info,
498 in_tree_llvm_info,
499 cc: HashMap::new(),
500 cxx: HashMap::new(),
501 ar: HashMap::new(),
502 ranlib: HashMap::new(),
503 crates: HashMap::new(),
504 crate_paths: HashMap::new(),
505 is_sudo,
506 ci_env: CiEnv::current(),
507 delayed_failures: RefCell::new(Vec::new()),
508 prerelease_version: Cell::new(None),
509 tool_artifacts: Default::default(),
510
511 #[cfg(feature = "build-metrics")]
512 metrics: metrics::BuildMetrics::init(),
513 };
514
515 build.verbose("finding compilers");
516 cc_detect::find(&mut build);
517 // When running `setup`, the profile is about to change, so any requirements we have now may
518 // be different on the next invocation. Don't check for them until the next time x.py is
519 // run. This is ok because `setup` never runs any build commands, so it won't fail if commands are missing.
520 if !matches!(build.config.cmd, Subcommand::Setup { .. }) {
521 build.verbose("running sanity check");
522 sanity::check(&mut build);
523 }
524
525 // If local-rust is the same major.minor as the current version, then force a
526 // local-rebuild
527 let local_version_verbose =
528 output(Command::new(&build.initial_rustc).arg("--version").arg("--verbose"));
529 let local_release = local_version_verbose
530 .lines()
531 .filter_map(|x| x.strip_prefix("release:"))
532 .next()
533 .unwrap()
534 .trim();
535 if local_release.split('.').take(2).eq(version.split('.').take(2)) {
536 build.verbose(&format!("auto-detected local-rebuild {}", local_release));
537 build.local_rebuild = true;
538 }
539
540 build.verbose("learning about cargo");
541 metadata::build(&mut build);
542
543 build
544 }
545
546 // modified from `check_submodule` and `update_submodule` in bootstrap.py
547 /// Given a path to the directory of a submodule, update it.
548 ///
549 /// `relative_path` should be relative to the root of the git repository, not an absolute path.
550 pub(crate) fn update_submodule(&self, relative_path: &Path) {
551 fn dir_is_empty(dir: &Path) -> bool {
552 t!(std::fs::read_dir(dir)).next().is_none()
553 }
554
555 if !self.config.submodules(&self.rust_info) {
556 return;
557 }
558
559 let absolute_path = self.config.src.join(relative_path);
560
561 // NOTE: The check for the empty directory is here because when running x.py the first time,
562 // the submodule won't be checked out. Check it out now so we can build it.
563 if !channel::GitInfo::new(false, &absolute_path).is_git() && !dir_is_empty(&absolute_path) {
564 return;
565 }
566
567 // check_submodule
568 let checked_out_hash =
569 output(Command::new("git").args(&["rev-parse", "HEAD"]).current_dir(&absolute_path));
570 // update_submodules
571 let recorded = output(
572 Command::new("git")
573 .args(&["ls-tree", "HEAD"])
574 .arg(relative_path)
575 .current_dir(&self.config.src),
576 );
577 let actual_hash = recorded
578 .split_whitespace()
579 .nth(2)
580 .unwrap_or_else(|| panic!("unexpected output `{}`", recorded));
581
582 // update_submodule
583 if actual_hash == checked_out_hash.trim_end() {
584 // already checked out
585 return;
586 }
587
588 println!("Updating submodule {}", relative_path.display());
589 self.run(
590 Command::new("git")
591 .args(&["submodule", "-q", "sync"])
592 .arg(relative_path)
593 .current_dir(&self.config.src),
594 );
595
596         // Try passing `--progress` to start, then run git again without it if that fails.
597 let update = |progress: bool| {
598 let mut git = Command::new("git");
599 git.args(&["submodule", "update", "--init", "--recursive", "--depth=1"]);
600 if progress {
601 git.arg("--progress");
602 }
603 git.arg(relative_path).current_dir(&self.config.src);
604 git
605 };
606 // NOTE: doesn't use `try_run` because this shouldn't print an error if it fails.
607 if !update(true).status().map_or(false, |status| status.success()) {
608 self.run(&mut update(false));
609 }
610
611 self.run(Command::new("git").args(&["reset", "-q", "--hard"]).current_dir(&absolute_path));
612 self.run(Command::new("git").args(&["clean", "-qdfx"]).current_dir(absolute_path));
613 }
614
615 /// If any submodule has been initialized already, sync it unconditionally.
616 /// This avoids contributors checking in a submodule change by accident.
617 pub fn maybe_update_submodules(&self) {
618 // WARNING: keep this in sync with the submodules hard-coded in bootstrap.py
619 let mut bootstrap_submodules: Vec<&str> = vec![
620 "src/tools/rust-installer",
621 "src/tools/cargo",
622 "src/tools/rls",
623 "src/tools/miri",
624 "library/backtrace",
625 "library/stdarch",
626 ];
627 // As in bootstrap.py, we include `rust-analyzer` if `build.vendor` was set in
628 // `config.toml`.
629 if self.config.vendor {
630 bootstrap_submodules.push("src/tools/rust-analyzer");
631 }
632 // Avoid running git when there isn't a git checkout.
633 if !self.config.submodules(&self.rust_info) {
634 return;
635 }
636 let output = output(
637 Command::new("git")
638 .args(&["config", "--file"])
639 .arg(&self.config.src.join(".gitmodules"))
640 .args(&["--get-regexp", "path"]),
641 );
642 for line in output.lines() {
643 // Look for `submodule.$name.path = $path`
644 // Sample output: `submodule.src/rust-installer.path src/tools/rust-installer`
645 let submodule = Path::new(line.splitn(2, ' ').nth(1).unwrap());
646 // avoid updating submodules twice
647 if !bootstrap_submodules.iter().any(|&p| Path::new(p) == submodule)
648 && channel::GitInfo::new(false, submodule).is_git()
649 {
650 self.update_submodule(submodule);
651 }
652 }
653 }
654
655 /// Executes the entire build, as configured by the flags and configuration.
656 pub fn build(&mut self) {
657 unsafe {
658 job::setup(self);
659 }
660
661 self.maybe_update_submodules();
662
663 if let Subcommand::Format { check, paths } = &self.config.cmd {
664 return format::format(&builder::Builder::new(&self), *check, &paths);
665 }
666
667 if let Subcommand::Clean { all } = self.config.cmd {
668 return clean::clean(self, all);
669 }
670
671 if let Subcommand::Setup { profile } = &self.config.cmd {
672 return setup::setup(&self.config, *profile);
673 }
674
675 {
676 let builder = builder::Builder::new(&self);
677 if let Some(path) = builder.paths.get(0) {
678 if path == Path::new("nonexistent/path/to/trigger/cargo/metadata") {
679 return;
680 }
681 }
682 }
683
684 if !self.config.dry_run {
685 {
686 self.config.dry_run = true;
687 let builder = builder::Builder::new(&self);
688 builder.execute_cli();
689 }
690 self.config.dry_run = false;
691 let builder = builder::Builder::new(&self);
692 builder.execute_cli();
693 } else {
694 let builder = builder::Builder::new(&self);
695 builder.execute_cli();
696 }
697
698 // Check for postponed failures from `test --no-fail-fast`.
699 let failures = self.delayed_failures.borrow();
700 if failures.len() > 0 {
701 eprintln!("\n{} command(s) did not execute successfully:\n", failures.len());
702 for failure in failures.iter() {
703 eprintln!(" - {}\n", failure);
704 }
705 process::exit(1);
706 }
707
708 #[cfg(feature = "build-metrics")]
709 self.metrics.persist(self);
710 }
711
712 /// Clear out `dir` if `input` is newer.
713 ///
714 /// After this executes, it will also ensure that `dir` exists.
715 fn clear_if_dirty(&self, dir: &Path, input: &Path) -> bool {
716 let stamp = dir.join(".stamp");
717 let mut cleared = false;
718 if mtime(&stamp) < mtime(input) {
719 self.verbose(&format!("Dirty - {}", dir.display()));
720 let _ = fs::remove_dir_all(dir);
721 cleared = true;
722 } else if stamp.exists() {
723 return cleared;
724 }
725 t!(fs::create_dir_all(dir));
726 t!(File::create(stamp));
727 cleared
728 }
729
730 /// Gets the space-separated set of activated features for the standard
731 /// library.
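    /// For example, with a typical default configuration this evaluates to something like
    /// `"panic-unwind backtrace"`; additional features such as `profiler` or the
    /// libunwind variants are appended depending on `config.toml` (illustrative).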
732 fn std_features(&self, target: TargetSelection) -> String {
733 let mut features = "panic-unwind".to_string();
734
735 match self.config.llvm_libunwind(target) {
736 LlvmLibunwind::InTree => features.push_str(" llvm-libunwind"),
737 LlvmLibunwind::System => features.push_str(" system-llvm-libunwind"),
738 LlvmLibunwind::No => {}
739 }
740 if self.config.backtrace {
741 features.push_str(" backtrace");
742 }
743 if self.config.profiler_enabled(target) {
744 features.push_str(" profiler");
745 }
746 features
747 }
748
749 /// Gets the space-separated set of activated features for the compiler.
750 fn rustc_features(&self, kind: Kind) -> String {
751 let mut features = vec![];
752 if self.config.jemalloc {
753 features.push("jemalloc");
754 }
755 if self.config.llvm_enabled() || kind == Kind::Check {
756 features.push("llvm");
757 }
758 // keep in sync with `bootstrap/compile.rs:rustc_cargo_env`
759 if self.config.rustc_parallel {
760 features.push("rustc_use_parallel_compiler");
761 }
762
763 // If debug logging is on, then we want the default for tracing:
764 // https://github.com/tokio-rs/tracing/blob/3dd5c03d907afdf2c39444a29931833335171554/tracing/src/level_filters.rs#L26
765 // which is everything (including debug/trace/etc.)
766         // If it's unset and debug_assertions is on, then debug_logging will also be on,
767         // and tracing *ignores* this feature when debug_assertions is on.
768 if !self.config.rust_debug_logging {
769 features.push("max_level_info");
770 }
771
772 features.join(" ")
773 }
774
775 /// Component directory that Cargo will produce output into (e.g.
776 /// release/debug)
777 fn cargo_dir(&self) -> &'static str {
778 if self.config.rust_optimize { "release" } else { "debug" }
779 }
780
781 fn tools_dir(&self, compiler: Compiler) -> PathBuf {
782 let out = self
783 .out
784 .join(&*compiler.host.triple)
785 .join(format!("stage{}-tools-bin", compiler.stage));
786 t!(fs::create_dir_all(&out));
787 out
788 }
789
790 /// Returns the root directory for all output generated in a particular
791 /// stage when running with a particular host compiler.
792 ///
793 /// The mode indicates what the root directory is for.
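    /// For example, assuming the default `build` output directory, `Mode::Std` with a
    /// stage 1 compiler hosted on `x86_64-unknown-linux-gnu` maps to
    /// `build/x86_64-unknown-linux-gnu/stage1-std` (illustrative).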
794 fn stage_out(&self, compiler: Compiler, mode: Mode) -> PathBuf {
795 let suffix = match mode {
796 Mode::Std => "-std",
797 Mode::Rustc => "-rustc",
798 Mode::Codegen => "-codegen",
799 Mode::ToolBootstrap => "-bootstrap-tools",
800 Mode::ToolStd | Mode::ToolRustc => "-tools",
801 };
802 self.out.join(&*compiler.host.triple).join(format!("stage{}{}", compiler.stage, suffix))
803 }
804
805 /// Returns the root output directory for all Cargo output in a given stage,
806 /// running a particular compiler, whether or not we're building the
807 /// standard library, and targeting the specified architecture.
808 fn cargo_out(&self, compiler: Compiler, mode: Mode, target: TargetSelection) -> PathBuf {
809 self.stage_out(compiler, mode).join(&*target.triple).join(self.cargo_dir())
810 }
811
812 /// Root output directory for LLVM compiled for `target`
813 ///
814 /// Note that if LLVM is configured externally then the directory returned
815 /// will likely be empty.
816 fn llvm_out(&self, target: TargetSelection) -> PathBuf {
817 self.out.join(&*target.triple).join("llvm")
818 }
819
820 fn lld_out(&self, target: TargetSelection) -> PathBuf {
821 self.out.join(&*target.triple).join("lld")
822 }
823
824 /// Output directory for all documentation for a target
825 fn doc_out(&self, target: TargetSelection) -> PathBuf {
826 self.out.join(&*target.triple).join("doc")
827 }
828
829 fn test_out(&self, target: TargetSelection) -> PathBuf {
830 self.out.join(&*target.triple).join("test")
831 }
832
833     /// Output directory for all compiler documentation for a target
834 fn compiler_doc_out(&self, target: TargetSelection) -> PathBuf {
835 self.out.join(&*target.triple).join("compiler-doc")
836 }
837
838 /// Output directory for some generated md crate documentation for a target (temporary)
839 fn md_doc_out(&self, target: TargetSelection) -> Interned<PathBuf> {
840 INTERNER.intern_path(self.out.join(&*target.triple).join("md-doc"))
841 }
842
843 /// Returns `true` if no custom `llvm-config` is set for the specified target.
844 ///
845     /// If no custom `llvm-config` was specified then Rust's own LLVM will be used.
846 fn is_rust_llvm(&self, target: TargetSelection) -> bool {
847 if self.config.llvm_from_ci && target == self.config.build {
848 return true;
849 }
850
851 match self.config.target_config.get(&target) {
852 Some(ref c) => c.llvm_config.is_none(),
853 None => true,
854 }
855 }
856
857     /// Returns the path to the `FileCheck` binary for the specified target
858 fn llvm_filecheck(&self, target: TargetSelection) -> PathBuf {
859 let target_config = self.config.target_config.get(&target);
860 if let Some(s) = target_config.and_then(|c| c.llvm_filecheck.as_ref()) {
861 s.to_path_buf()
862 } else if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
863 let llvm_bindir = output(Command::new(s).arg("--bindir"));
864 let filecheck = Path::new(llvm_bindir.trim()).join(exe("FileCheck", target));
865 if filecheck.exists() {
866 filecheck
867 } else {
868 // On Fedora the system LLVM installs FileCheck in the
869 // llvm subdirectory of the libdir.
870 let llvm_libdir = output(Command::new(s).arg("--libdir"));
871 let lib_filecheck =
872 Path::new(llvm_libdir.trim()).join("llvm").join(exe("FileCheck", target));
873 if lib_filecheck.exists() {
874 lib_filecheck
875 } else {
876 // Return the most normal file name, even though
877 // it doesn't exist, so that any error message
878 // refers to that.
879 filecheck
880 }
881 }
882 } else {
883 let base = self.llvm_out(target).join("build");
884 let base = if !self.ninja() && target.contains("msvc") {
885 if self.config.llvm_optimize {
886 if self.config.llvm_release_debuginfo {
887 base.join("RelWithDebInfo")
888 } else {
889 base.join("Release")
890 }
891 } else {
892 base.join("Debug")
893 }
894 } else {
895 base
896 };
897 base.join("bin").join(exe("FileCheck", target))
898 }
899 }
900
901 /// Directory for libraries built from C/C++ code and shared between stages.
902 fn native_dir(&self, target: TargetSelection) -> PathBuf {
903 self.out.join(&*target.triple).join("native")
904 }
905
906 /// Root output directory for rust_test_helpers library compiled for
907 /// `target`
908 fn test_helpers_out(&self, target: TargetSelection) -> PathBuf {
909 self.native_dir(target).join("rust-test-helpers")
910 }
911
912 /// Adds the `RUST_TEST_THREADS` env var if necessary
913 fn add_rust_test_threads(&self, cmd: &mut Command) {
914 if env::var_os("RUST_TEST_THREADS").is_none() {
915 cmd.env("RUST_TEST_THREADS", self.jobs().to_string());
916 }
917 }
918
919 /// Returns the libdir of the snapshot compiler.
920 fn rustc_snapshot_libdir(&self) -> PathBuf {
921 self.rustc_snapshot_sysroot().join(libdir(self.config.build))
922 }
923
924 /// Returns the sysroot of the snapshot compiler.
925 fn rustc_snapshot_sysroot(&self) -> &Path {
926 static SYSROOT_CACHE: OnceCell<PathBuf> = once_cell::sync::OnceCell::new();
927 SYSROOT_CACHE.get_or_init(|| {
928 let mut rustc = Command::new(&self.initial_rustc);
929 rustc.args(&["--print", "sysroot"]);
930 output(&mut rustc).trim().into()
931 })
932 }
933
934 /// Runs a command, printing out nice contextual information if it fails.
935 fn run(&self, cmd: &mut Command) {
936 if self.config.dry_run {
937 return;
938 }
939 self.verbose(&format!("running: {:?}", cmd));
940 run(cmd, self.is_verbose())
941 }
942
943 /// Runs a command, printing out nice contextual information if it fails.
944 fn run_quiet(&self, cmd: &mut Command) {
945 if self.config.dry_run {
946 return;
947 }
948 self.verbose(&format!("running: {:?}", cmd));
949 run_suppressed(cmd)
950 }
951
952 /// Runs a command, printing out nice contextual information if it fails.
953 /// Exits if the command failed to execute at all, otherwise returns its
954 /// `status.success()`.
955 fn try_run(&self, cmd: &mut Command) -> bool {
956 if self.config.dry_run {
957 return true;
958 }
959 self.verbose(&format!("running: {:?}", cmd));
960 try_run(cmd, self.is_verbose())
961 }
962
963 /// Runs a command, printing out nice contextual information if it fails.
964 /// Exits if the command failed to execute at all, otherwise returns its
965 /// `status.success()`.
966 fn try_run_quiet(&self, cmd: &mut Command) -> bool {
967 if self.config.dry_run {
968 return true;
969 }
970 self.verbose(&format!("running: {:?}", cmd));
971 try_run_suppressed(cmd)
972 }
973
974 /// Runs a command, printing out nice contextual information if it fails.
975     /// Returns false if it did not execute at all, otherwise returns its
976 /// `status.success()`.
977 fn check_run(&self, cmd: &mut Command) -> bool {
978 if self.config.dry_run {
979 return true;
980 }
981 self.verbose(&format!("running: {:?}", cmd));
982 check_run(cmd, self.is_verbose())
983 }
984
985 pub fn is_verbose(&self) -> bool {
986 self.verbosity > 0
987 }
988
989 /// Prints a message if this build is configured in verbose mode.
990 fn verbose(&self, msg: &str) {
991 if self.is_verbose() {
992 println!("{}", msg);
993 }
994 }
995
996 pub fn is_verbose_than(&self, level: usize) -> bool {
997 self.verbosity > level
998 }
999
1000 /// Prints a message if this build is configured in more verbose mode than `level`.
1001 fn verbose_than(&self, level: usize, msg: &str) {
1002 if self.is_verbose_than(level) {
1003 println!("{}", msg);
1004 }
1005 }
1006
1007 fn info(&self, msg: &str) {
1008 if self.config.dry_run {
1009 return;
1010 }
1011 println!("{}", msg);
1012 }
1013
1014 /// Returns the number of parallel jobs that have been configured for this
1015 /// build.
1016 fn jobs(&self) -> u32 {
1017 self.config.jobs.unwrap_or_else(|| num_cpus::get() as u32)
1018 }
1019
1020 fn debuginfo_map_to(&self, which: GitRepo) -> Option<String> {
1021 if !self.config.rust_remap_debuginfo {
1022 return None;
1023 }
1024
1025 match which {
1026 GitRepo::Rustc => {
1027 let sha = self.rust_sha().unwrap_or(&self.version);
1028 Some(format!("/rustc/{}", sha))
1029 }
1030 GitRepo::Llvm => Some(String::from("/rustc/llvm")),
1031 }
1032 }
1033
1034 /// Returns the path to the C compiler for the target specified.
1035 fn cc(&self, target: TargetSelection) -> &Path {
1036 self.cc[&target].path()
1037 }
1038
1039 /// Returns a list of flags to pass to the C compiler for the target
1040 /// specified.
1041 fn cflags(&self, target: TargetSelection, which: GitRepo, c: CLang) -> Vec<String> {
1042 let base = match c {
1043 CLang::C => &self.cc[&target],
1044 CLang::Cxx => &self.cxx[&target],
1045 };
1046
1047 // Filter out -O and /O (the optimization flags) that we picked up from
1048 // cc-rs because the build scripts will determine that for themselves.
1049 let mut base = base
1050 .args()
1051 .iter()
1052 .map(|s| s.to_string_lossy().into_owned())
1053 .filter(|s| !s.starts_with("-O") && !s.starts_with("/O"))
1054 .collect::<Vec<String>>();
1055
1056 // If we're compiling on macOS then we add a few unconditional flags
1057 // indicating that we want libc++ (more filled out than libstdc++) and
1058 // we want to compile for 10.7. This way we can ensure that
1059 // LLVM/etc are all properly compiled.
1060 if target.contains("apple-darwin") {
1061 base.push("-stdlib=libc++".into());
1062 }
1063
1064 // Work around an apparently bad MinGW / GCC optimization,
1065 // See: https://lists.llvm.org/pipermail/cfe-dev/2016-December/051980.html
1066 // See: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=78936
1067 if &*target.triple == "i686-pc-windows-gnu" {
1068 base.push("-fno-omit-frame-pointer".into());
1069 }
1070
1071 if let Some(map_to) = self.debuginfo_map_to(which) {
1072 let map = format!("{}={}", self.src.display(), map_to);
1073 let cc = self.cc(target);
1074 if cc.ends_with("clang") || cc.ends_with("gcc") {
1075 base.push(format!("-fdebug-prefix-map={}", map));
1076 } else if cc.ends_with("clang-cl.exe") {
1077 base.push("-Xclang".into());
1078 base.push(format!("-fdebug-prefix-map={}", map));
1079 }
1080 }
1081 base
1082 }
1083
1084 /// Returns the path to the `ar` archive utility for the target specified.
1085 fn ar(&self, target: TargetSelection) -> Option<&Path> {
1086 self.ar.get(&target).map(|p| &**p)
1087 }
1088
1089 /// Returns the path to the `ranlib` utility for the target specified.
1090 fn ranlib(&self, target: TargetSelection) -> Option<&Path> {
1091 self.ranlib.get(&target).map(|p| &**p)
1092 }
1093
1094 /// Returns the path to the C++ compiler for the target specified.
1095 fn cxx(&self, target: TargetSelection) -> Result<&Path, String> {
1096 match self.cxx.get(&target) {
1097 Some(p) => Ok(p.path()),
1098 None => {
1099 Err(format!("target `{}` is not configured as a host, only as a target", target))
1100 }
1101 }
1102 }
1103
1104 /// Returns the path to the linker for the given target if it needs to be overridden.
1105 fn linker(&self, target: TargetSelection) -> Option<&Path> {
1106 if let Some(linker) = self.config.target_config.get(&target).and_then(|c| c.linker.as_ref())
1107 {
1108 Some(linker)
1109 } else if target.contains("vxworks") {
1110             // need to use the C++ compiler as the linker to resolve the exception functions
1111             // that only exist in C++ libraries
1112 Some(self.cxx[&target].path())
1113 } else if target != self.config.build
1114 && util::use_host_linker(target)
1115 && !target.contains("msvc")
1116 {
1117 Some(self.cc(target))
1118 } else if self.config.use_lld && !self.is_fuse_ld_lld(target) && self.build == target {
1119 Some(&self.initial_lld)
1120 } else {
1121 None
1122 }
1123 }
1124
1125 // LLD is used through `-fuse-ld=lld` rather than directly.
1126 // Only MSVC targets use LLD directly at the moment.
1127 fn is_fuse_ld_lld(&self, target: TargetSelection) -> bool {
1128 self.config.use_lld && !target.contains("msvc")
1129 }
1130
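    /// Returns the extra `-Clink-arg` flags needed to link with LLD for `target`, if LLD is
    /// enabled; e.g. on a non-Windows, non-MSVC target this yields
    /// `-Clink-arg=-fuse-ld=lld` and `-Clink-arg=-Wl,--threads=1` (illustrative of the
    /// values assembled below).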
1131 fn lld_flags(&self, target: TargetSelection) -> impl Iterator<Item = String> {
1132 let mut options = [None, None];
1133
1134 if self.config.use_lld {
1135 if self.is_fuse_ld_lld(target) {
1136 options[0] = Some("-Clink-arg=-fuse-ld=lld".to_string());
1137 }
1138
1139 let threads = if target.contains("windows") { "/threads:1" } else { "--threads=1" };
1140 options[1] = Some(format!("-Clink-arg=-Wl,{}", threads));
1141 }
1142
1143 IntoIterator::into_iter(options).flatten()
1144 }
1145
1146     /// Returns whether this target should statically link the C runtime, if specified
1147 fn crt_static(&self, target: TargetSelection) -> Option<bool> {
1148 if target.contains("pc-windows-msvc") {
1149 Some(true)
1150 } else {
1151 self.config.target_config.get(&target).and_then(|t| t.crt_static)
1152 }
1153 }
1154
1155 /// Returns the "musl root" for this `target`, if defined
1156 fn musl_root(&self, target: TargetSelection) -> Option<&Path> {
1157 self.config
1158 .target_config
1159 .get(&target)
1160 .and_then(|t| t.musl_root.as_ref())
1161 .or_else(|| self.config.musl_root.as_ref())
1162 .map(|p| &**p)
1163 }
1164
1165 /// Returns the "musl libdir" for this `target`.
1166 fn musl_libdir(&self, target: TargetSelection) -> Option<PathBuf> {
1167 let t = self.config.target_config.get(&target)?;
1168 if let libdir @ Some(_) = &t.musl_libdir {
1169 return libdir.clone();
1170 }
1171 self.musl_root(target).map(|root| root.join("lib"))
1172 }
1173
1174 /// Returns the sysroot for the wasi target, if defined
1175 fn wasi_root(&self, target: TargetSelection) -> Option<&Path> {
1176 self.config.target_config.get(&target).and_then(|t| t.wasi_root.as_ref()).map(|p| &**p)
1177 }
1178
1179     /// Returns whether this is a no-std `target`, if defined
1180 fn no_std(&self, target: TargetSelection) -> Option<bool> {
1181 self.config.target_config.get(&target).map(|t| t.no_std)
1182 }
1183
1184 /// Returns `true` if the target will be tested using the `remote-test-client`
1185 /// and `remote-test-server` binaries.
1186 fn remote_tested(&self, target: TargetSelection) -> bool {
1187 self.qemu_rootfs(target).is_some()
1188 || target.contains("android")
1189 || env::var_os("TEST_DEVICE_ADDR").is_some()
1190 }
1191
1192 /// Returns the root of the "rootfs" image that this target will be using,
1193 /// if one was configured.
1194 ///
1195 /// If `Some` is returned then that means that tests for this target are
1196 /// emulated with QEMU and binaries will need to be shipped to the emulator.
1197 fn qemu_rootfs(&self, target: TargetSelection) -> Option<&Path> {
1198 self.config.target_config.get(&target).and_then(|t| t.qemu_rootfs.as_ref()).map(|p| &**p)
1199 }
1200
1201 /// Path to the python interpreter to use
1202 fn python(&self) -> &Path {
1203 if self.config.build.ends_with("apple-darwin") {
1204 // Force /usr/bin/python3 on macOS for LLDB tests because we're loading the
1205 // LLDB plugin's compiled module which only works with the system python
1206 // (namely not Homebrew-installed python)
1207 Path::new("/usr/bin/python3")
1208 } else {
1209 self.config
1210 .python
1211 .as_ref()
1212 .expect("python is required for running LLDB or rustdoc tests")
1213 }
1214 }
1215
1216 /// Temporary directory that extended error information is emitted to.
1217 fn extended_error_dir(&self) -> PathBuf {
1218 self.out.join("tmp/extended-error-metadata")
1219 }
1220
1221 /// Tests whether the `compiler` compiling for `target` should be forced to
1222 /// use a stage1 compiler instead.
1223 ///
1224     /// Currently, by default, the build system does not perform a "full
1225     /// bootstrap", where we would compile the compiler three times.
1226     /// Instead, we compile the compiler twice. The final stage (stage2)
1227 /// just copies the libraries from the previous stage, which is what this
1228 /// method detects.
1229 ///
1230 /// Here we return `true` if:
1231 ///
1232 /// * The build isn't performing a full bootstrap
1233 /// * The `compiler` is in the final stage, 2
1234 /// * We're not cross-compiling, so the artifacts are already available in
1235 /// stage1
1236 ///
1237 /// When all of these conditions are met the build will lift artifacts from
1238 /// the previous stage forward.
1239 fn force_use_stage1(&self, compiler: Compiler, target: TargetSelection) -> bool {
1240 !self.config.full_bootstrap
1241 && compiler.stage >= 2
1242 && (self.hosts.iter().any(|h| *h == target) || target == self.build)
1243 }
1244
1245 /// Given `num` in the form "a.b.c" return a "release string" which
1246 /// describes the release version number.
1247 ///
1248 /// For example on nightly this returns "a.b.c-nightly", on beta it returns
1249 /// "a.b.c-beta.1" and on stable it just returns "a.b.c".
1250 fn release(&self, num: &str) -> String {
1251 match &self.config.channel[..] {
1252 "stable" => num.to_string(),
1253 "beta" => {
1254 if self.rust_info.is_git() && !self.config.ignore_git {
1255 format!("{}-beta.{}", num, self.beta_prerelease_version())
1256 } else {
1257 format!("{}-beta", num)
1258 }
1259 }
1260 "nightly" => format!("{}-nightly", num),
1261 _ => format!("{}-dev", num),
1262 }
1263 }
1264
1265 fn beta_prerelease_version(&self) -> u32 {
1266 if let Some(s) = self.prerelease_version.get() {
1267 return s;
1268 }
1269
1270 // Figure out how many merge commits happened since we branched off master.
1271 // That's our beta number!
1272 // (Note that we use a `..` range, not the `...` symmetric difference.)
1273 let count = output(
1274 Command::new("git")
1275 .arg("rev-list")
1276 .arg("--count")
1277 .arg("--merges")
1278 .arg("refs/remotes/origin/master..HEAD")
1279 .current_dir(&self.src),
1280 );
1281 let n = count.trim().parse().unwrap();
1282 self.prerelease_version.set(Some(n));
1283 n
1284 }
1285
1286 /// Returns the value of `release` above for Rust itself.
1287 fn rust_release(&self) -> String {
1288 self.release(&self.version)
1289 }
1290
1291 /// Returns the "package version" for a component given the `num` release
1292 /// number.
1293 ///
1294 /// The package version is typically what shows up in the names of tarballs.
1295 /// For channels like beta/nightly it's just the channel name, otherwise
1296 /// it's the `num` provided.
1297 fn package_vers(&self, num: &str) -> String {
1298 match &self.config.channel[..] {
1299 "stable" => num.to_string(),
1300 "beta" => "beta".to_string(),
1301 "nightly" => "nightly".to_string(),
1302 _ => format!("{}-dev", num),
1303 }
1304 }
1305
1306 /// Returns the value of `package_vers` above for Rust itself.
1307 fn rust_package_vers(&self) -> String {
1308 self.package_vers(&self.version)
1309 }
1310
1311 fn llvm_link_tools_dynamically(&self, target: TargetSelection) -> bool {
1312 target.contains("linux-gnu") || target.contains("apple-darwin")
1313 }
1314
1315 /// Returns the `version` string associated with this compiler for Rust
1316 /// itself.
1317 ///
1318 /// Note that this is a descriptive string which includes the commit date,
1319 /// sha, version, etc.
1320 fn rust_version(&self) -> String {
1321 let mut version = self.rust_info.version(self, &self.version);
1322 if let Some(ref s) = self.config.description {
1323 version.push_str(" (");
1324 version.push_str(s);
1325 version.push(')');
1326 }
1327 version
1328 }
1329
1330 /// Returns the full commit hash.
1331 fn rust_sha(&self) -> Option<&str> {
1332 self.rust_info.sha()
1333 }
1334
1335 /// Returns the `a.b.c` version that the given package is at.
1336 fn release_num(&self, package: &str) -> String {
1337 let toml_file_name = self.src.join(&format!("src/tools/{}/Cargo.toml", package));
1338 let toml = t!(fs::read_to_string(&toml_file_name));
1339 for line in toml.lines() {
1340 if let Some(stripped) =
1341 line.strip_prefix("version = \"").and_then(|s| s.strip_suffix("\""))
1342 {
1343 return stripped.to_owned();
1344 }
1345 }
1346
1347 panic!("failed to find version in {}'s Cargo.toml", package)
1348 }
1349
1350 /// Returns `true` if unstable features should be enabled for the compiler
1351 /// we're building.
1352 fn unstable_features(&self) -> bool {
1353 match &self.config.channel[..] {
1354 "stable" | "beta" => false,
1355 "nightly" | _ => true,
1356 }
1357 }
1358
1359 /// Returns a Vec of all the dependencies of the given root crate,
1360 /// including transitive dependencies and the root itself. Only includes
1361 /// "local" crates (those in the local source tree, not from a registry).
1362 fn in_tree_crates(&self, root: &str, target: Option<TargetSelection>) -> Vec<&Crate> {
1363 let mut ret = Vec::new();
1364 let mut list = vec![INTERNER.intern_str(root)];
1365 let mut visited = HashSet::new();
1366 while let Some(krate) = list.pop() {
1367 let krate = &self.crates[&krate];
1368 ret.push(krate);
1369 for dep in &krate.deps {
1370 if !self.crates.contains_key(dep) {
1371 // Ignore non-workspace members.
1372 continue;
1373 }
1374 // Don't include optional deps if their features are not
1375 // enabled. Ideally this would be computed from `cargo
1376 // metadata --features …`, but that is somewhat slow. In
1377 // the future, we may want to consider just filtering all
1378 // build and dev dependencies in metadata::build.
1379 if visited.insert(dep)
1380 && (dep != "profiler_builtins"
1381 || target
1382 .map(|t| self.config.profiler_enabled(t))
1383 .unwrap_or_else(|| self.config.any_profiler_enabled()))
1384 && (dep != "rustc_codegen_llvm" || self.config.llvm_enabled())
1385 {
1386 list.push(*dep);
1387 }
1388 }
1389 }
1390 ret
1391 }
1392
1393 fn read_stamp_file(&self, stamp: &Path) -> Vec<(PathBuf, DependencyType)> {
1394 if self.config.dry_run {
1395 return Vec::new();
1396 }
1397
1398 let mut paths = Vec::new();
1399 let contents = t!(fs::read(stamp), &stamp);
1400 // This is the method we use for extracting paths from the stamp file passed to us. See
1401 // run_cargo for more information (in compile.rs).
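        // Each entry is a one-byte tag ('h', 's', or 't' for the dependency type) immediately
        // followed by the path, with entries separated by NUL bytes. An illustrative stamp
        // might look like: "h/path/to/libproc_macro_dep.so\0t/path/to/libstd.rlib\0".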
1402 for part in contents.split(|b| *b == 0) {
1403 if part.is_empty() {
1404 continue;
1405 }
1406 let dependency_type = match part[0] as char {
1407 'h' => DependencyType::Host,
1408 's' => DependencyType::TargetSelfContained,
1409 't' => DependencyType::Target,
1410 _ => unreachable!(),
1411 };
1412 let path = PathBuf::from(t!(str::from_utf8(&part[1..])));
1413 paths.push((path, dependency_type));
1414 }
1415 paths
1416 }
1417
1418 /// Create a temporary directory in `out` and return its path.
1419 ///
1420 /// NOTE: this temporary directory is shared between all steps;
1421 /// if you need an empty directory, create a new subdirectory inside it.
1422 fn tempdir(&self) -> PathBuf {
1423 let tmp = self.out.join("tmp");
1424 t!(fs::create_dir_all(&tmp));
1425 tmp
1426 }
1427
1428 /// Copies a file from `src` to `dst`
1429 pub fn copy(&self, src: &Path, dst: &Path) {
1430 self.copy_internal(src, dst, false);
1431 }
1432
1433 fn copy_internal(&self, src: &Path, dst: &Path, dereference_symlinks: bool) {
1434 if self.config.dry_run {
1435 return;
1436 }
1437 self.verbose_than(1, &format!("Copy {:?} to {:?}", src, dst));
1438 if src == dst {
1439 return;
1440 }
1441 let _ = fs::remove_file(&dst);
1442 let metadata = t!(src.symlink_metadata());
1443 let mut src = src.to_path_buf();
1444 if metadata.file_type().is_symlink() {
1445 if dereference_symlinks {
1446 src = t!(fs::canonicalize(src));
1447 } else {
1448 let link = t!(fs::read_link(src));
1449 t!(symlink_file(link, dst));
1450 return;
1451 }
1452 }
1453 if let Ok(()) = fs::hard_link(&src, dst) {
1454 // Attempt to "easy copy" by creating a hard link
1455 // (symlinks don't work on windows), but if that fails
1456 // just fall back to a slow `copy` operation.
1457 } else {
1458 if let Err(e) = fs::copy(&src, dst) {
1459 panic!("failed to copy `{}` to `{}`: {}", src.display(), dst.display(), e)
1460 }
1461 t!(fs::set_permissions(dst, metadata.permissions()));
1462 let atime = FileTime::from_last_access_time(&metadata);
1463 let mtime = FileTime::from_last_modification_time(&metadata);
1464 t!(filetime::set_file_times(dst, atime, mtime));
1465 }
1466 }
1467
1468 /// Copies the `src` directory recursively to `dst`. Both are assumed to exist
1469 /// when this function is called.
1470 pub fn cp_r(&self, src: &Path, dst: &Path) {
1471 if self.config.dry_run {
1472 return;
1473 }
1474 for f in self.read_dir(src) {
1475 let path = f.path();
1476 let name = path.file_name().unwrap();
1477 let dst = dst.join(name);
1478 if t!(f.file_type()).is_dir() {
1479 t!(fs::create_dir_all(&dst));
1480 self.cp_r(&path, &dst);
1481 } else {
1482 let _ = fs::remove_file(&dst);
1483 self.copy(&path, &dst);
1484 }
1485 }
1486 }
1487
1488 /// Copies the `src` directory recursively to `dst`. Both are assumed to exist
1489 /// when this function is called. Unwanted files or directories can be skipped
1490 /// by returning `false` from the filter function.
1491 pub fn cp_filtered(&self, src: &Path, dst: &Path, filter: &dyn Fn(&Path) -> bool) {
1492 // Immediately recurse with an empty relative path
1493 self.recurse_(src, dst, Path::new(""), filter)
1494 }
1495
1496 // Inner function does the actual work
1497 fn recurse_(&self, src: &Path, dst: &Path, relative: &Path, filter: &dyn Fn(&Path) -> bool) {
1498 for f in self.read_dir(src) {
1499 let path = f.path();
1500 let name = path.file_name().unwrap();
1501 let dst = dst.join(name);
1502 let relative = relative.join(name);
1503 // Only copy file or directory if the filter function returns true
1504 if filter(&relative) {
1505 if t!(f.file_type()).is_dir() {
1506 let _ = fs::remove_dir_all(&dst);
1507 self.create_dir(&dst);
1508 self.recurse_(&path, &dst, &relative, filter);
1509 } else {
1510 let _ = fs::remove_file(&dst);
1511 self.copy(&path, &dst);
1512 }
1513 }
1514 }
1515 }
1516
1517 fn copy_to_folder(&self, src: &Path, dest_folder: &Path) {
1518 let file_name = src.file_name().unwrap();
1519 let dest = dest_folder.join(file_name);
1520 self.copy(src, &dest);
1521 }
1522
1523 fn install(&self, src: &Path, dstdir: &Path, perms: u32) {
1524 if self.config.dry_run {
1525 return;
1526 }
1527 let dst = dstdir.join(src.file_name().unwrap());
1528 self.verbose_than(1, &format!("Install {:?} to {:?}", src, dst));
1529 t!(fs::create_dir_all(dstdir));
1530 if !src.exists() {
1531 panic!("Error: File \"{}\" not found!", src.display());
1532 }
1533 self.copy_internal(src, &dst, true);
1534 chmod(&dst, perms);
1535 }
1536
1537 fn create(&self, path: &Path, s: &str) {
1538 if self.config.dry_run {
1539 return;
1540 }
1541 t!(fs::write(path, s));
1542 }
1543
1544 fn read(&self, path: &Path) -> String {
1545 if self.config.dry_run {
1546 return String::new();
1547 }
1548 t!(fs::read_to_string(path))
1549 }
1550
1551 fn create_dir(&self, dir: &Path) {
1552 if self.config.dry_run {
1553 return;
1554 }
1555 t!(fs::create_dir_all(dir))
1556 }
1557
1558 fn remove_dir(&self, dir: &Path) {
1559 if self.config.dry_run {
1560 return;
1561 }
1562 t!(fs::remove_dir_all(dir))
1563 }
1564
1565 fn read_dir(&self, dir: &Path) -> impl Iterator<Item = fs::DirEntry> {
1566 let iter = match fs::read_dir(dir) {
1567 Ok(v) => v,
1568 Err(_) if self.config.dry_run => return vec![].into_iter(),
1569 Err(err) => panic!("could not read dir {:?}: {:?}", dir, err),
1570 };
1571 iter.map(|e| t!(e)).collect::<Vec<_>>().into_iter()
1572 }
1573
1574 fn remove(&self, f: &Path) {
1575 if self.config.dry_run {
1576 return;
1577 }
1578 fs::remove_file(f).unwrap_or_else(|_| panic!("failed to remove {:?}", f));
1579 }
1580
1581     /// Returns whether config.ninja is enabled, checking for ninja's existence and
1582     /// exiting with a nicer error message if it is required but missing.
1583 fn ninja(&self) -> bool {
1584 let mut cmd_finder = crate::sanity::Finder::new();
1585
1586 if self.config.ninja_in_file {
1587 // Some Linux distros rename `ninja` to `ninja-build`.
1588 // CMake can work with either binary name.
1589 if cmd_finder.maybe_have("ninja-build").is_none()
1590 && cmd_finder.maybe_have("ninja").is_none()
1591 {
1592 eprintln!(
1593 "
1594 Couldn't find required command: ninja (or ninja-build)
1595
1596 You should install ninja as described at
1597 <https://github.com/ninja-build/ninja/wiki/Pre-built-Ninja-packages>,
1598 or set `ninja = false` in the `[llvm]` section of `config.toml`.
1599 Alternatively, set `download-ci-llvm = true` in that `[llvm]` section
1600 to download LLVM rather than building it.
1601 "
1602 );
1603 std::process::exit(1);
1604 }
1605 }
1606
1607 // If ninja isn't enabled but we're building for MSVC then we try
1608 // doubly hard to enable it. It was realized in #43767 that the msbuild
1609 // CMake generator for MSVC doesn't respect configuration options like
1610 // disabling LLVM assertions, which can often be quite important!
1611 //
1612 // In these cases we automatically enable Ninja if we find it in the
1613 // environment.
1614 if !self.config.ninja_in_file && self.config.build.contains("msvc") {
1615 if cmd_finder.maybe_have("ninja").is_some() {
1616 return true;
1617 }
1618 }
1619
1620 self.config.ninja_in_file
1621 }
1622 }
1623
1624 #[cfg(unix)]
1625 fn chmod(path: &Path, perms: u32) {
1626 use std::os::unix::fs::*;
1627 t!(fs::set_permissions(path, fs::Permissions::from_mode(perms)));
1628 }
1629 #[cfg(windows)]
1630 fn chmod(_path: &Path, _perms: u32) {}
1631
1632 impl Compiler {
1633 pub fn with_stage(mut self, stage: u32) -> Compiler {
1634 self.stage = stage;
1635 self
1636 }
1637
1638 /// Returns `true` if this is a snapshot compiler for `build`'s configuration
1639 pub fn is_snapshot(&self, build: &Build) -> bool {
1640 self.stage == 0 && self.host == build.build
1641 }
1642
1643     /// Returns whether this compiler should be treated as a final-stage compiler in the
1644     /// current build session.
1645 /// This takes into account whether we're performing a full bootstrap or
1646 /// not; don't directly compare the stage with `2`!
1647 pub fn is_final_stage(&self, build: &Build) -> bool {
1648 let final_stage = if build.config.full_bootstrap { 2 } else { 1 };
1649 self.stage >= final_stage
1650 }
1651 }
1652
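/// Converts a string into an environment-variable-style name by replacing `-` with `_`
/// and upper-casing the result, e.g. `envify("rust-analyzer")` yields `"RUST_ANALYZER"`.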
1653 fn envify(s: &str) -> String {
1654 s.chars()
1655 .map(|c| match c {
1656 '-' => '_',
1657 c => c,
1658 })
1659 .flat_map(|c| c.to_uppercase())
1660 .collect()
1661 }