1 //! Implementation of rustbuild, the Rust build system.
2 //!
3 //! This module, and its descendants, are the implementation of the Rust build
4 //! system. Most of this build system is backed by Cargo but the outer layer
5 //! here serves as the ability to orchestrate calling Cargo, sequencing Cargo
6 //! builds, building artifacts like LLVM, etc. The goals of rustbuild are:
7 //!
8 //! * To be an easily understandable, easily extensible, and maintainable build
9 //! system.
10 //! * Leverage standard tools in the Rust ecosystem to build the compiler, aka
11 //! crates.io and Cargo.
12 //! * A standard interface to build across all platforms, including MSVC
13 //!
14 //! ## Architecture
15 //!
16 //! The build system defers most of the complicated logic managing invocations
17 //! of rustc and rustdoc to Cargo itself. However, moving through the various
18 //! stages and copying artifacts is still work that rustbuild does itself. Each time rustbuild
19 //! is invoked, it will iterate through the list of predefined steps and execute
20 //! each serially in turn if it matches the paths passed or is a default rule.
21 //! For each step rustbuild relies on the step internally being incremental and
22 //! parallel. Note, though, that the `-j` parameter to rustbuild gets forwarded
23 //! to appropriate test harnesses and such.
24 //!
25 //! Most of the "meaty" steps that matter are backed by Cargo, which does indeed
26 //! have its own parallelism and incremental management. Later steps, like
27 //! tests, aren't incremental and simply run the entire suite currently.
28 //! However, compiletest itself tries to avoid running tests when the artifacts
29 //! that are involved (mainly the compiler) haven't changed.
30 //!
31 //! When you execute `x.py build`, the steps executed are:
32 //!
33 //! * First, the python script is run. This will automatically download the
34 //! stage0 rustc and cargo according to `src/stage0.json`, or use the cached
35 //! versions if they're available. These are then used to compile rustbuild
36 //! itself (using Cargo). Finally, control is then transferred to rustbuild.
37 //!
38 //! * Rustbuild takes over, performs sanity checks, probes the environment,
39 //! reads configuration, and starts executing steps as it reads the command
40 //! line arguments (paths) or going through the default rules.
41 //!
42 //! The build output will be something like the following:
43 //!
44 //! Building stage0 std artifacts
45 //! Copying stage0 std
46 //! Building stage0 test artifacts
47 //! Copying stage0 test
48 //! Building stage0 compiler artifacts
49 //! Copying stage0 rustc
50 //! Assembling stage1 compiler
51 //! Building stage1 std artifacts
52 //! Copying stage1 std
53 //! Building stage1 test artifacts
54 //! Copying stage1 test
55 //! Building stage1 compiler artifacts
56 //! Copying stage1 rustc
57 //! Assembling stage2 compiler
58 //! Uplifting stage1 std
59 //! Uplifting stage1 test
60 //! Uplifting stage1 rustc
61 //!
62 //! Let's dissect that a little:
63 //!
64 //! ## Building stage0 {std,test,compiler} artifacts
65 //!
66 //! These steps use the provided (downloaded, usually) compiler to compile the
67 //! local Rust source into libraries we can use.
68 //!
69 //! ## Copying stage0 {std,test,rustc}
70 //!
71 //! This copies the build output from Cargo into
72 //! `build/$HOST/stage0-sysroot/lib/rustlib/$ARCH/lib`. FIXME: this step's
73 //! documentation should be expanded -- the information already here may be
74 //! incorrect.
75 //!
76 //! ## Assembling stage1 compiler
77 //!
78 //! This copies the libraries we built in "building stage0 ... artifacts" into
79 //! the stage1 compiler's lib directory. These are the host libraries that the
80 //! compiler itself uses to run. These aren't actually used by artifacts the new
81 //! compiler generates. This step also copies the rustc and rustdoc binaries we
82 //! generated into build/$HOST/stage1/bin.
83 //!
84 //! The stage1/bin/rustc is a fully functional compiler, but it doesn't yet have
85 //! any libraries to link built binaries or libraries to. The next 3 steps will
86 //! provide those libraries for it; they are mostly equivalent to constructing
87 //! the stage1/bin compiler so we don't go through them individually.
88 //!
89 //! ## Uplifting stage1 {std,test,rustc}
90 //!
91 //! This step copies the libraries from the stage1 compiler sysroot into the
92 //! stage2 compiler. This is done to avoid rebuilding the compiler; libraries
93 //! we'd build in this step should be identical (in function, if not necessarily
94 //! identical on disk) so there's no need to recompile the compiler again. Note
95 //! that if you want to, you can enable the full-bootstrap option to change this
96 //! behavior.
97 //!
98 //! Each step is driven by a separate Cargo project and rustbuild orchestrates
99 //! copying files between steps and otherwise preparing for Cargo to run.
100 //!
101 //! ## Further information
102 //!
103 //! More documentation can be found in each respective module below, and you can
104 //! also check out the `src/bootstrap/README.md` file for more information.
105
106 use std::cell::{Cell, RefCell};
107 use std::collections::{HashMap, HashSet};
108 use std::env;
109 use std::fs::{self, File};
110 use std::io;
111 use std::io::ErrorKind;
112 use std::path::{Path, PathBuf};
113 use std::process::Command;
114 use std::str;
115
116 use build_helper::ci::CiEnv;
117 use channel::GitInfo;
118 use config::{DryRun, Target};
119 use filetime::FileTime;
120 use once_cell::sync::OnceCell;
121
122 use crate::builder::Kind;
123 use crate::config::{LlvmLibunwind, TargetSelection};
124 use crate::util::{
125 exe, libdir, mtime, output, run, run_suppressed, symlink_dir, try_run_suppressed,
126 };
127
128 mod bolt;
129 mod builder;
130 mod cache;
131 mod cc_detect;
132 mod channel;
133 mod check;
134 mod clean;
135 mod compile;
136 mod config;
137 mod dist;
138 mod doc;
139 mod download;
140 mod flags;
141 mod format;
142 mod install;
143 mod metadata;
144 mod native;
145 mod run;
146 mod sanity;
147 mod setup;
148 mod tarball;
149 mod test;
150 mod tool;
151 mod toolstate;
152 pub mod util;
153
154 #[cfg(feature = "build-metrics")]
155 mod metrics;
156
157 #[cfg(windows)]
158 mod job;
159
160 #[cfg(all(unix, not(target_os = "haiku")))]
161 mod job {
162 pub unsafe fn setup(build: &mut crate::Build) {
163 if build.config.low_priority {
164 libc::setpriority(libc::PRIO_PGRP as _, 0, 10);
165 }
166 }
167 }
168
169 #[cfg(any(target_os = "haiku", target_os = "hermit", not(any(unix, windows))))]
170 mod job {
171 pub unsafe fn setup(_build: &mut crate::Build) {}
172 }
173
174 pub use crate::builder::PathSet;
175 use crate::cache::{Interned, INTERNER};
176 pub use crate::config::Config;
177 pub use crate::flags::Subcommand;
178
179 const LLVM_TOOLS: &[&str] = &[
180 "llvm-cov", // used to generate coverage report
181 "llvm-nm", // used to inspect binaries; it shows symbol names, their sizes and visibility
182 "llvm-objcopy", // used to transform ELFs into binary format which flashing tools consume
183 "llvm-objdump", // used to disassemble programs
184 "llvm-profdata", // used to inspect and merge files generated by profiles
185 "llvm-readobj", // used to get information from ELFs/objects that the other tools don't provide
186 "llvm-size", // used to prints the size of the linker sections of a program
187 "llvm-strip", // used to discard symbols from binary files to reduce their size
188 "llvm-ar", // used for creating and modifying archive files
189 "llvm-as", // used to convert LLVM assembly to LLVM bitcode
190 "llvm-dis", // used to disassemble LLVM bitcode
191 "llc", // used to compile LLVM bytecode
192 "opt", // used to optimize LLVM bytecode
193 ];
194
195 /// LLD file names for all flavors.
196 const LLD_FILE_NAMES: &[&str] = &["ld.lld", "ld64.lld", "lld-link", "wasm-ld"];
197
198 pub const VERSION: usize = 2;
199
200 /// Extra --check-cfg to add when building
201 /// (Mode restriction, config name, config values (if any))
202 const EXTRA_CHECK_CFGS: &[(Option<Mode>, &'static str, Option<&[&'static str]>)] = &[
203 (None, "bootstrap", None),
204 (Some(Mode::Rustc), "parallel_compiler", None),
205 (Some(Mode::ToolRustc), "parallel_compiler", None),
206 (Some(Mode::ToolRustc), "emulate_second_only_system", None),
207 (Some(Mode::Codegen), "parallel_compiler", None),
208 (Some(Mode::Std), "stdarch_intel_sde", None),
209 (Some(Mode::Std), "no_fp_fmt_parse", None),
210 (Some(Mode::Std), "no_global_oom_handling", None),
211 (Some(Mode::Std), "no_rc", None),
212 (Some(Mode::Std), "no_sync", None),
213 (Some(Mode::Std), "freebsd12", None),
214 (Some(Mode::Std), "backtrace_in_libstd", None),
215 /* Extra values not defined in the built-in targets yet, but used in std */
216 (Some(Mode::Std), "target_env", Some(&["libnx"])),
217 (Some(Mode::Std), "target_os", Some(&["watchos"])),
218 (
219 Some(Mode::Std),
220 "target_arch",
221 Some(&["asmjs", "spirv", "nvptx", "nvptx64", "le32", "xtensa"]),
222 ),
223 /* Extra names used by dependencies */
224 // FIXME: Used by rustfmt in their tests, but is invalid (neither cargo nor bootstrap ever sets
225 // this config); it should probably be removed or use an allow attribute.
226 (Some(Mode::ToolRustc), "release", None),
227 // FIXME: Used by stdarch in their tests; should use an allow attribute instead.
228 (Some(Mode::Std), "dont_compile_me", None),
229 // FIXME: Used by serde_json, but we should not be triggering on external dependencies.
230 (Some(Mode::Rustc), "no_btreemap_remove_entry", None),
231 (Some(Mode::ToolRustc), "no_btreemap_remove_entry", None),
232 // FIXME: Used by crossbeam-utils, but we should not be triggering on external dependencies.
233 (Some(Mode::Rustc), "crossbeam_loom", None),
234 (Some(Mode::ToolRustc), "crossbeam_loom", None),
235 // FIXME: Used by proc-macro2, but we should not be triggering on external dependencies.
236 (Some(Mode::Rustc), "span_locations", None),
237 (Some(Mode::ToolRustc), "span_locations", None),
238 // Can be passed in RUSTFLAGS to prevent direct syscalls in rustix.
239 (None, "rustix_use_libc", None),
240 ];
241
242 /// A structure representing a Rust compiler.
243 ///
244 /// Each compiler has a `stage` that it is associated with and a `host` that
245 /// corresponds to the platform the compiler runs on. This structure is used as
246 /// a parameter to many methods below.
247 #[derive(Eq, PartialOrd, Ord, PartialEq, Clone, Copy, Hash, Debug)]
248 pub struct Compiler {
249 stage: u32,
250 host: TargetSelection,
251 }
252
253 #[derive(PartialEq, Eq, Copy, Clone, Debug)]
254 pub enum DocTests {
255 /// Run normal tests and doc tests (default).
256 Yes,
257 /// Do not run any doc tests.
258 No,
259 /// Only run doc tests.
260 Only,
261 }
262
263 pub enum GitRepo {
264 Rustc,
265 Llvm,
266 }
267
268 /// Global configuration for the build system.
269 ///
270 /// This structure transitively contains all configuration for the build system.
271 /// All filesystem-encoded configuration is in `config`, all flags are in
272 /// `flags`, and then parsed or probed information is listed in the keys below.
273 ///
274 /// This structure is a parameter of almost all methods in the build system,
275 /// although most functions are implemented as free functions rather than
276 /// methods specifically on this structure itself (to make it easier to
277 /// organize).
278 pub struct Build {
279 /// User-specified configuration from `config.toml`.
280 config: Config,
281
282 // Version information
283 version: String,
284
285 // Properties derived from the above configuration
286 src: PathBuf,
287 out: PathBuf,
288 bootstrap_out: PathBuf,
289 cargo_info: channel::GitInfo,
290 rust_analyzer_info: channel::GitInfo,
291 clippy_info: channel::GitInfo,
292 miri_info: channel::GitInfo,
293 rustfmt_info: channel::GitInfo,
294 in_tree_llvm_info: channel::GitInfo,
295 local_rebuild: bool,
296 fail_fast: bool,
297 doc_tests: DocTests,
298 verbosity: usize,
299
300 // Targets for which to build
301 build: TargetSelection,
302 hosts: Vec<TargetSelection>,
303 targets: Vec<TargetSelection>,
304
305 initial_rustc: PathBuf,
306 initial_cargo: PathBuf,
307 initial_lld: PathBuf,
308 initial_libdir: PathBuf,
309
310 // Runtime state filled in later on
311 // C/C++ compilers and archiver for all targets
312 cc: HashMap<TargetSelection, cc::Tool>,
313 cxx: HashMap<TargetSelection, cc::Tool>,
314 ar: HashMap<TargetSelection, PathBuf>,
315 ranlib: HashMap<TargetSelection, PathBuf>,
316 // Miscellaneous
317 // allow bidirectional lookups: both name -> path and path -> name
318 crates: HashMap<Interned<String>, Crate>,
319 crate_paths: HashMap<PathBuf, Interned<String>>,
320 is_sudo: bool,
321 ci_env: CiEnv,
322 delayed_failures: RefCell<Vec<String>>,
323 prerelease_version: Cell<Option<u32>>,
324 tool_artifacts:
325 RefCell<HashMap<TargetSelection, HashMap<String, (&'static str, PathBuf, Vec<String>)>>>,
326
327 #[cfg(feature = "build-metrics")]
328 metrics: metrics::BuildMetrics,
329 }
330
331 #[derive(Debug)]
332 struct Crate {
333 name: Interned<String>,
334 deps: HashSet<Interned<String>>,
335 path: PathBuf,
336 }
337
338 impl Crate {
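/// Returns this crate's path relative to the root of the checkout (`build.config.src`).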
339 fn local_path(&self, build: &Build) -> PathBuf {
340 self.path.strip_prefix(&build.config.src).unwrap().into()
341 }
342 }
343
344 /// When building Rust various objects are handled differently.
345 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
346 pub enum DependencyType {
347 /// Libraries originating from proc-macros.
348 Host,
349 /// Typical Rust libraries.
350 Target,
351 /// Non-Rust libraries and objects shipped to ease usage of certain targets.
352 TargetSelfContained,
353 }
354
355 /// The various "modes" of invoking Cargo.
356 ///
357 /// These entries currently correspond to the various output directories of the
358 //! build system, with each mode generating output in a different directory.
359 #[derive(Debug, Hash, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
360 pub enum Mode {
361 /// Build the standard library, placing output in the "stageN-std" directory.
362 Std,
363
364 /// Build librustc, and compiler libraries, placing output in the "stageN-rustc" directory.
365 Rustc,
366
367 /// Build a codegen backend for rustc, placing the output in the "stageN-codegen" directory.
368 Codegen,
369
370 /// Build a tool, placing output in the "stage0-bootstrap-tools"
371 /// directory. This is for miscellaneous sets of tools that are built
372 /// using the bootstrap stage0 compiler in its entirety (target libraries
373 /// and all). Typically these tools compile with stable Rust.
374 ToolBootstrap,
375
376 /// Build a tool which uses the locally built std, placing output in the
377 /// "stageN-tools" directory. Its usage is quite rare, mainly used by
378 /// compiletest which needs libtest.
379 ToolStd,
380
381 /// Build a tool which uses the locally built rustc and the target std,
382 /// placing the output in the "stageN-tools" directory. This is used for
383 /// anything that needs a fully functional rustc, such as rustdoc, clippy,
384 /// cargo, rls, rustfmt, miri, etc.
385 ToolRustc,
386 }
387
388 impl Mode {
389 pub fn is_tool(&self) -> bool {
390 matches!(self, Mode::ToolBootstrap | Mode::ToolRustc | Mode::ToolStd)
391 }
392
393 pub fn must_support_dlopen(&self) -> bool {
394 matches!(self, Mode::Std | Mode::Codegen)
395 }
396 }
397
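/// Whether to use the C or the C++ compiler (and its flags), e.g. when computing `Build::cflags`.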
398 pub enum CLang {
399 C,
400 Cxx,
401 }
402
403 macro_rules! forward {
404 ( $( $fn:ident( $($param:ident: $ty:ty),* ) $( -> $ret:ty)? ),+ $(,)? ) => {
405 impl Build {
406 $( fn $fn(&self, $($param: $ty),* ) $( -> $ret)? {
407 self.config.$fn( $($param),* )
408 } )+
409 }
410 }
411 }
412
413 forward! {
414 verbose(msg: &str),
415 is_verbose() -> bool,
416 create(path: &Path, s: &str),
417 remove(f: &Path),
418 tempdir() -> PathBuf,
419 try_run(cmd: &mut Command) -> bool,
420 llvm_link_shared() -> bool,
421 download_rustc() -> bool,
422 initial_rustfmt() -> Option<PathBuf>,
423 }
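// As a sketch, the `verbose(msg: &str)` entry above expands to roughly:
//
//     impl Build {
//         fn verbose(&self, msg: &str) {
//             self.config.verbose(msg)
//         }
//     }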
424
425 impl Build {
426 /// Creates a new set of build configuration from the `flags` on the command
427 /// line and the filesystem `config`.
428 ///
429 /// By default all build output will be placed in the current directory.
430 pub fn new(mut config: Config) -> Build {
431 let src = config.src.clone();
432 let out = config.out.clone();
433
434 #[cfg(unix)]
435 // keep this consistent with the equivalent check in x.py:
436 // https://github.com/rust-lang/rust/blob/a8a33cf27166d3eabaffc58ed3799e054af3b0c6/src/bootstrap/bootstrap.py#L796-L797
437 let is_sudo = match env::var_os("SUDO_USER") {
438 Some(_sudo_user) => {
439 let uid = unsafe { libc::getuid() };
440 uid == 0
441 }
442 None => false,
443 };
444 #[cfg(not(unix))]
445 let is_sudo = false;
446
447 let ignore_git = config.ignore_git;
448 let rust_info = channel::GitInfo::new(ignore_git, &src);
449 let cargo_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/cargo"));
450 let rust_analyzer_info =
451 channel::GitInfo::new(ignore_git, &src.join("src/tools/rust-analyzer"));
452 let clippy_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/clippy"));
453 let miri_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/miri"));
454 let rustfmt_info = channel::GitInfo::new(ignore_git, &src.join("src/tools/rustfmt"));
455
456 // we always try to use git for LLVM builds
457 let in_tree_llvm_info = channel::GitInfo::new(false, &src.join("src/llvm-project"));
458
459 let initial_target_libdir_str = if config.dry_run() {
460 "/dummy/lib/path/to/lib/".to_string()
461 } else {
462 output(
463 Command::new(&config.initial_rustc)
464 .arg("--target")
465 .arg(config.build.rustc_target_arg())
466 .arg("--print")
467 .arg("target-libdir"),
468 )
469 };
470 let initial_target_dir = Path::new(&initial_target_libdir_str).parent().unwrap();
471 let initial_lld = initial_target_dir.join("bin").join("rust-lld");
472
473 let initial_sysroot = if config.dry_run() {
474 "/dummy".to_string()
475 } else {
476 output(Command::new(&config.initial_rustc).arg("--print").arg("sysroot"))
477 };
478 let initial_libdir = initial_target_dir
479 .parent()
480 .unwrap()
481 .parent()
482 .unwrap()
483 .strip_prefix(initial_sysroot.trim())
484 .unwrap()
485 .to_path_buf();
486
487 let version = std::fs::read_to_string(src.join("src").join("version"))
488 .expect("failed to read src/version");
489 let version = version.trim();
490
491 let bootstrap_out = std::env::current_exe()
492 .expect("could not determine path to running process")
493 .parent()
494 .unwrap()
495 .to_path_buf();
496 if !bootstrap_out.join(exe("rustc", config.build)).exists() && !cfg!(test) {
497 // this restriction can be lifted whenever https://github.com/rust-lang/rfcs/pull/3028 is implemented
498 panic!(
499 "`rustc` not found in {}, run `cargo build --bins` before `cargo run`",
500 bootstrap_out.display()
501 )
502 }
503
504 if rust_info.is_from_tarball() && config.description.is_none() {
505 config.description = Some("built from a source tarball".to_owned());
506 }
507
508 let mut build = Build {
509 initial_rustc: config.initial_rustc.clone(),
510 initial_cargo: config.initial_cargo.clone(),
511 initial_lld,
512 initial_libdir,
513 local_rebuild: config.local_rebuild,
514 fail_fast: config.cmd.fail_fast(),
515 doc_tests: config.cmd.doc_tests(),
516 verbosity: config.verbose,
517
518 build: config.build,
519 hosts: config.hosts.clone(),
520 targets: config.targets.clone(),
521
522 config,
523 version: version.to_string(),
524 src,
525 out,
526 bootstrap_out,
527
528 cargo_info,
529 rust_analyzer_info,
530 clippy_info,
531 miri_info,
532 rustfmt_info,
533 in_tree_llvm_info,
534 cc: HashMap::new(),
535 cxx: HashMap::new(),
536 ar: HashMap::new(),
537 ranlib: HashMap::new(),
538 crates: HashMap::new(),
539 crate_paths: HashMap::new(),
540 is_sudo,
541 ci_env: CiEnv::current(),
542 delayed_failures: RefCell::new(Vec::new()),
543 prerelease_version: Cell::new(None),
544 tool_artifacts: Default::default(),
545
546 #[cfg(feature = "build-metrics")]
547 metrics: metrics::BuildMetrics::init(),
548 };
549
550 // If local-rust is the same major.minor as the current version, then force a
551 // local-rebuild
552 let local_version_verbose =
553 output(Command::new(&build.initial_rustc).arg("--version").arg("--verbose"));
554 let local_release = local_version_verbose
555 .lines()
556 .filter_map(|x| x.strip_prefix("release:"))
557 .next()
558 .unwrap()
559 .trim();
560 if local_release.split('.').take(2).eq(version.split('.').take(2)) {
561 build.verbose(&format!("auto-detected local-rebuild {}", local_release));
562 build.local_rebuild = true;
563 }
564
565 build.verbose("finding compilers");
566 cc_detect::find(&mut build);
567 // When running `setup`, the profile is about to change, so any requirements we have now may
568 // be different on the next invocation. Don't check for them until the next time x.py is
569 // run. This is ok because `setup` never runs any build commands, so it won't fail if commands are missing.
570 //
571 // Similarly, for `setup` we don't actually need submodules or cargo metadata.
572 if !matches!(build.config.cmd, Subcommand::Setup { .. }) {
573 build.verbose("running sanity check");
574 sanity::check(&mut build);
575
576 // Make sure we update these before gathering metadata so we don't get an error about missing
577 // Cargo.toml files.
578 let rust_submodules = [
579 "src/tools/rust-installer",
580 "src/tools/cargo",
581 "library/backtrace",
582 "library/stdarch",
583 ];
584 for s in rust_submodules {
585 build.update_submodule(Path::new(s));
586 }
587 // Now, update all existing submodules.
588 build.update_existing_submodules();
589
590 build.verbose("learning about cargo");
591 metadata::build(&mut build);
592 }
593
594 // Make a symbolic link so we can use a consistent directory in the documentation.
595 let build_triple = build.out.join(&build.build.triple);
596 t!(fs::create_dir_all(&build_triple));
597 let host = build.out.join("host");
598 if let Err(e) = symlink_dir(&build.config, &build_triple, &host) {
599 if e.kind() != ErrorKind::AlreadyExists {
600 panic!(
601 "symlink_dir({} => {}) failed with {}",
602 host.display(),
603 build_triple.display(),
604 e
605 );
606 }
607 }
608
609 build
610 }
611
612 // modified from `check_submodule` and `update_submodule` in bootstrap.py
613 /// Given a path to the directory of a submodule, update it.
614 ///
615 /// `relative_path` should be relative to the root of the git repository, not an absolute path.
616 pub(crate) fn update_submodule(&self, relative_path: &Path) {
617 fn dir_is_empty(dir: &Path) -> bool {
618 t!(std::fs::read_dir(dir)).next().is_none()
619 }
620
621 if !self.config.submodules(&self.rust_info()) {
622 return;
623 }
624
625 let absolute_path = self.config.src.join(relative_path);
626
627 // NOTE: The check for the empty directory is here because when running x.py the first time,
628 // the submodule won't be checked out. Check it out now so we can build it.
629 if !channel::GitInfo::new(false, &absolute_path).is_managed_git_subrepository()
630 && !dir_is_empty(&absolute_path)
631 {
632 return;
633 }
634
635 // check_submodule
636 let checked_out_hash =
637 output(Command::new("git").args(&["rev-parse", "HEAD"]).current_dir(&absolute_path));
638 // update_submodules
639 let recorded = output(
640 Command::new("git")
641 .args(&["ls-tree", "HEAD"])
642 .arg(relative_path)
643 .current_dir(&self.config.src),
644 );
645 let actual_hash = recorded
646 .split_whitespace()
647 .nth(2)
648 .unwrap_or_else(|| panic!("unexpected output `{}`", recorded));
649
650 // update_submodule
651 if actual_hash == checked_out_hash.trim_end() {
652 // already checked out
653 return;
654 }
655
656 println!("Updating submodule {}", relative_path.display());
657 self.run(
658 Command::new("git")
659 .args(&["submodule", "-q", "sync"])
660 .arg(relative_path)
661 .current_dir(&self.config.src),
662 );
663
664 // Try passing `--progress` to start, then run git again without if that fails.
665 let update = |progress: bool| {
666 let mut git = Command::new("git");
667 git.args(&["submodule", "update", "--init", "--recursive", "--depth=1"]);
668 if progress {
669 git.arg("--progress");
670 }
671 git.arg(relative_path).current_dir(&self.config.src);
672 git
673 };
674 // NOTE: doesn't use `try_run` because this shouldn't print an error if it fails.
675 if !update(true).status().map_or(false, |status| status.success()) {
676 self.run(&mut update(false));
677 }
678
679 // Save any local changes, but avoid running `git stash pop` if there are none (since it will exit with an error).
680 let has_local_modifications = !self.try_run(
681 Command::new("git")
682 .args(&["diff-index", "--quiet", "HEAD"])
683 .current_dir(&absolute_path),
684 );
685 if has_local_modifications {
686 self.run(Command::new("git").args(&["stash", "push"]).current_dir(&absolute_path));
687 }
688
689 self.run(Command::new("git").args(&["reset", "-q", "--hard"]).current_dir(&absolute_path));
690 self.run(Command::new("git").args(&["clean", "-qdfx"]).current_dir(&absolute_path));
691
692 if has_local_modifications {
693 self.run(Command::new("git").args(&["stash", "pop"]).current_dir(absolute_path));
694 }
695 }
696
697 /// If any submodule has been initialized already, sync it unconditionally.
698 /// This avoids contributors checking in a submodule change by accident.
699 pub fn update_existing_submodules(&self) {
700 // Avoid running git when there isn't a git checkout.
701 if !self.config.submodules(&self.rust_info()) {
702 return;
703 }
704 let output = output(
705 self.config
706 .git()
707 .args(&["config", "--file"])
708 .arg(&self.config.src.join(".gitmodules"))
709 .args(&["--get-regexp", "path"]),
710 );
711 for line in output.lines() {
712 // Look for `submodule.$name.path = $path`
713 // Sample output: `submodule.src/rust-installer.path src/tools/rust-installer`
714 let submodule = Path::new(line.splitn(2, ' ').nth(1).unwrap());
715 // Don't update the submodule unless it's already been cloned.
716 if channel::GitInfo::new(false, submodule).is_managed_git_subrepository() {
717 self.update_submodule(submodule);
718 }
719 }
720 }
721
722 /// Executes the entire build, as configured by the flags and configuration.
723 pub fn build(&mut self) {
724 unsafe {
725 job::setup(self);
726 }
727
728 if let Subcommand::Format { check, paths } = &self.config.cmd {
729 return format::format(&builder::Builder::new(&self), *check, &paths);
730 }
731
732 // Download rustfmt early so that it can be used in rust-analyzer configs.
733 let _ = &builder::Builder::new(&self).initial_rustfmt();
734
735 {
736 let builder = builder::Builder::new(&self);
737 if let Some(path) = builder.paths.get(0) {
738 if path == Path::new("nonexistent/path/to/trigger/cargo/metadata") {
739 return;
740 }
741 }
742 }
743
744 if !self.config.dry_run() {
745 {
746 self.config.dry_run = DryRun::SelfCheck;
747 let builder = builder::Builder::new(&self);
748 builder.execute_cli();
749 }
750 self.config.dry_run = DryRun::Disabled;
751 let builder = builder::Builder::new(&self);
752 builder.execute_cli();
753 } else {
754 let builder = builder::Builder::new(&self);
755 builder.execute_cli();
756 }
757
758 // Check for postponed failures from `test --no-fail-fast`.
759 let failures = self.delayed_failures.borrow();
760 if failures.len() > 0 {
761 eprintln!("\n{} command(s) did not execute successfully:\n", failures.len());
762 for failure in failures.iter() {
763 eprintln!(" - {}\n", failure);
764 }
765 detail_exit(1);
766 }
767
768 #[cfg(feature = "build-metrics")]
769 self.metrics.persist(self);
770 }
771
772 /// Clear out `dir` if `input` is newer.
773 ///
774 /// After this executes, it will also ensure that `dir` exists.
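/// Returns whether `dir` was cleared.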
775 fn clear_if_dirty(&self, dir: &Path, input: &Path) -> bool {
776 let stamp = dir.join(".stamp");
777 let mut cleared = false;
778 if mtime(&stamp) < mtime(input) {
779 self.verbose(&format!("Dirty - {}", dir.display()));
780 let _ = fs::remove_dir_all(dir);
781 cleared = true;
782 } else if stamp.exists() {
783 return cleared;
784 }
785 t!(fs::create_dir_all(dir));
786 t!(File::create(stamp));
787 cleared
788 }
789
790 fn rust_info(&self) -> &GitInfo {
791 &self.config.rust_info
792 }
793
794 /// Gets the space-separated set of activated features for the standard
795 /// library.
796 fn std_features(&self, target: TargetSelection) -> String {
797 let mut features = " panic-unwind".to_string();
798
799 match self.config.llvm_libunwind(target) {
800 LlvmLibunwind::InTree => features.push_str(" llvm-libunwind"),
801 LlvmLibunwind::System => features.push_str(" system-llvm-libunwind"),
802 LlvmLibunwind::No => {}
803 }
804 if self.config.backtrace {
805 features.push_str(" backtrace");
806 }
807 if self.config.profiler_enabled(target) {
808 features.push_str(" profiler");
809 }
810 features
811 }
812
813 /// Gets the space-separated set of activated features for the compiler.
814 fn rustc_features(&self, kind: Kind) -> String {
815 let mut features = vec![];
816 if self.config.jemalloc {
817 features.push("jemalloc");
818 }
819 if self.config.llvm_enabled() || kind == Kind::Check {
820 features.push("llvm");
821 }
822 // keep in sync with `bootstrap/compile.rs:rustc_cargo_env`
823 if self.config.rustc_parallel {
824 features.push("rustc_use_parallel_compiler");
825 }
826
827 // If debug logging is on, then we want the default for tracing:
828 // https://github.com/tokio-rs/tracing/blob/3dd5c03d907afdf2c39444a29931833335171554/tracing/src/level_filters.rs#L26
829 // which is everything (including debug/trace/etc.)
830 // If it's unset and debug_assertions is on, then debug_logging will also be on,
831 // and tracing will *ignore* this feature when debug_assertions is on.
832 if !self.config.rust_debug_logging {
833 features.push("max_level_info");
834 }
835
836 features.join(" ")
837 }
838
839 /// Component directory that Cargo will produce output into (e.g.
840 /// release/debug)
841 fn cargo_dir(&self) -> &'static str {
842 if self.config.rust_optimize { "release" } else { "debug" }
843 }
844
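/// Returns (and creates) the directory that holds standalone tool binaries for
/// the given compiler, i.e. `<out>/<host triple>/stageN-tools-bin`.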
845 fn tools_dir(&self, compiler: Compiler) -> PathBuf {
846 let out = self
847 .out
848 .join(&*compiler.host.triple)
849 .join(format!("stage{}-tools-bin", compiler.stage));
850 t!(fs::create_dir_all(&out));
851 out
852 }
853
854 /// Returns the root directory for all output generated in a particular
855 /// stage when running with a particular host compiler.
856 ///
857 /// The mode indicates what the root directory is for.
858 fn stage_out(&self, compiler: Compiler, mode: Mode) -> PathBuf {
859 let suffix = match mode {
860 Mode::Std => "-std",
861 Mode::Rustc => "-rustc",
862 Mode::Codegen => "-codegen",
863 Mode::ToolBootstrap => "-bootstrap-tools",
864 Mode::ToolStd | Mode::ToolRustc => "-tools",
865 };
866 self.out.join(&*compiler.host.triple).join(format!("stage{}{}", compiler.stage, suffix))
867 }
868
869 /// Returns the root output directory for all Cargo output in a given stage,
870 /// running a particular compiler, whether or not we're building the
871 /// standard library, and targeting the specified architecture.
872 fn cargo_out(&self, compiler: Compiler, mode: Mode, target: TargetSelection) -> PathBuf {
873 self.stage_out(compiler, mode).join(&*target.triple).join(self.cargo_dir())
874 }
875
876 /// Root output directory for LLVM compiled for `target`
877 ///
878 /// Note that if LLVM is configured externally then the directory returned
879 /// will likely be empty.
880 fn llvm_out(&self, target: TargetSelection) -> PathBuf {
881 self.out.join(&*target.triple).join("llvm")
882 }
883
884 fn lld_out(&self, target: TargetSelection) -> PathBuf {
885 self.out.join(&*target.triple).join("lld")
886 }
887
888 /// Output directory for all documentation for a target
889 fn doc_out(&self, target: TargetSelection) -> PathBuf {
890 self.out.join(&*target.triple).join("doc")
891 }
892
893 /// Output directory for all JSON-formatted documentation for a target
894 fn json_doc_out(&self, target: TargetSelection) -> PathBuf {
895 self.out.join(&*target.triple).join("json-doc")
896 }
897
898 fn test_out(&self, target: TargetSelection) -> PathBuf {
899 self.out.join(&*target.triple).join("test")
900 }
901
902 /// Output directory for all documentation for a target
903 fn compiler_doc_out(&self, target: TargetSelection) -> PathBuf {
904 self.out.join(&*target.triple).join("compiler-doc")
905 }
906
907 /// Output directory for some generated md crate documentation for a target (temporary)
908 fn md_doc_out(&self, target: TargetSelection) -> Interned<PathBuf> {
909 INTERNER.intern_path(self.out.join(&*target.triple).join("md-doc"))
910 }
911
912 /// Returns `true` if no custom `llvm-config` is set for the specified target.
913 ///
914 /// If no custom `llvm-config` was specified then Rust's llvm will be used.
915 fn is_rust_llvm(&self, target: TargetSelection) -> bool {
916 match self.config.target_config.get(&target) {
917 Some(Target { llvm_has_rust_patches: Some(patched), .. }) => *patched,
918 Some(Target { llvm_config, .. }) => {
919 // If the user set llvm-config we assume Rust is not patched,
920 // but first check to see if it was configured by llvm-from-ci.
921 (self.config.llvm_from_ci && target == self.config.build) || llvm_config.is_none()
922 }
923 None => true,
924 }
925 }
926
927 /// Returns the path to `FileCheck` binary for the specified target
928 fn llvm_filecheck(&self, target: TargetSelection) -> PathBuf {
929 let target_config = self.config.target_config.get(&target);
930 if let Some(s) = target_config.and_then(|c| c.llvm_filecheck.as_ref()) {
931 s.to_path_buf()
932 } else if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
933 let llvm_bindir = output(Command::new(s).arg("--bindir"));
934 let filecheck = Path::new(llvm_bindir.trim()).join(exe("FileCheck", target));
935 if filecheck.exists() {
936 filecheck
937 } else {
938 // On Fedora the system LLVM installs FileCheck in the
939 // llvm subdirectory of the libdir.
940 let llvm_libdir = output(Command::new(s).arg("--libdir"));
941 let lib_filecheck =
942 Path::new(llvm_libdir.trim()).join("llvm").join(exe("FileCheck", target));
943 if lib_filecheck.exists() {
944 lib_filecheck
945 } else {
946 // Return the most normal file name, even though
947 // it doesn't exist, so that any error message
948 // refers to that.
949 filecheck
950 }
951 }
952 } else {
953 let base = self.llvm_out(target).join("build");
954 let base = if !self.ninja() && target.contains("msvc") {
955 if self.config.llvm_optimize {
956 if self.config.llvm_release_debuginfo {
957 base.join("RelWithDebInfo")
958 } else {
959 base.join("Release")
960 }
961 } else {
962 base.join("Debug")
963 }
964 } else {
965 base
966 };
967 base.join("bin").join(exe("FileCheck", target))
968 }
969 }
970
971 /// Directory for libraries built from C/C++ code and shared between stages.
972 fn native_dir(&self, target: TargetSelection) -> PathBuf {
973 self.out.join(&*target.triple).join("native")
974 }
975
976 /// Root output directory for rust_test_helpers library compiled for
977 /// `target`
978 fn test_helpers_out(&self, target: TargetSelection) -> PathBuf {
979 self.native_dir(target).join("rust-test-helpers")
980 }
981
982 /// Adds the `RUST_TEST_THREADS` env var if necessary
983 fn add_rust_test_threads(&self, cmd: &mut Command) {
984 if env::var_os("RUST_TEST_THREADS").is_none() {
985 cmd.env("RUST_TEST_THREADS", self.jobs().to_string());
986 }
987 }
988
989 /// Returns the libdir of the snapshot compiler.
990 fn rustc_snapshot_libdir(&self) -> PathBuf {
991 self.rustc_snapshot_sysroot().join(libdir(self.config.build))
992 }
993
994 /// Returns the sysroot of the snapshot compiler.
995 fn rustc_snapshot_sysroot(&self) -> &Path {
996 static SYSROOT_CACHE: OnceCell<PathBuf> = once_cell::sync::OnceCell::new();
997 SYSROOT_CACHE.get_or_init(|| {
998 let mut rustc = Command::new(&self.initial_rustc);
999 rustc.args(&["--print", "sysroot"]);
1000 output(&mut rustc).trim().into()
1001 })
1002 }
1003
1004 /// Runs a command, printing out nice contextual information if it fails.
1005 fn run(&self, cmd: &mut Command) {
1006 if self.config.dry_run() {
1007 return;
1008 }
1009 self.verbose(&format!("running: {:?}", cmd));
1010 run(cmd, self.is_verbose())
1011 }
1012
1013 /// Runs a command, printing out nice contextual information if it fails.
1014 fn run_quiet(&self, cmd: &mut Command) {
1015 if self.config.dry_run() {
1016 return;
1017 }
1018 self.verbose(&format!("running: {:?}", cmd));
1019 run_suppressed(cmd)
1020 }
1021
1022 /// Runs a command, printing out nice contextual information if it fails.
1023 /// Exits if the command failed to execute at all, otherwise returns its
1024 /// `status.success()`.
1025 fn try_run_quiet(&self, cmd: &mut Command) -> bool {
1026 if self.config.dry_run() {
1027 return true;
1028 }
1029 self.verbose(&format!("running: {:?}", cmd));
1030 try_run_suppressed(cmd)
1031 }
1032
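/// Returns whether this build is configured to be more verbose than `level`.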
1033 pub fn is_verbose_than(&self, level: usize) -> bool {
1034 self.verbosity > level
1035 }
1036
1037 /// Prints a message if this build is configured in more verbose mode than `level`.
1038 fn verbose_than(&self, level: usize, msg: &str) {
1039 if self.is_verbose_than(level) {
1040 println!("{}", msg);
1041 }
1042 }
1043
1044 fn info(&self, msg: &str) {
1045 match self.config.dry_run {
1046 DryRun::SelfCheck => return,
1047 DryRun::Disabled | DryRun::UserSelected => {
1048 println!("{}", msg);
1049 }
1050 }
1051 }
1052
1053 /// Returns the number of parallel jobs that have been configured for this
1054 /// build.
1055 fn jobs(&self) -> u32 {
1056 self.config.jobs.unwrap_or_else(|| {
1057 std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32
1058 })
1059 }
1060
1061 fn debuginfo_map_to(&self, which: GitRepo) -> Option<String> {
1062 if !self.config.rust_remap_debuginfo {
1063 return None;
1064 }
1065
1066 match which {
1067 GitRepo::Rustc => {
1068 let sha = self.rust_sha().unwrap_or(&self.version);
1069 Some(format!("/rustc/{}", sha))
1070 }
1071 GitRepo::Llvm => Some(String::from("/rustc/llvm")),
1072 }
1073 }
1074
1075 /// Returns the path to the C compiler for the target specified.
1076 fn cc(&self, target: TargetSelection) -> &Path {
1077 self.cc[&target].path()
1078 }
1079
1080 /// Returns a list of flags to pass to the C compiler for the target
1081 /// specified.
1082 fn cflags(&self, target: TargetSelection, which: GitRepo, c: CLang) -> Vec<String> {
1083 let base = match c {
1084 CLang::C => &self.cc[&target],
1085 CLang::Cxx => &self.cxx[&target],
1086 };
1087
1088 // Filter out -O and /O (the optimization flags) that we picked up from
1089 // cc-rs because the build scripts will determine that for themselves.
1090 let mut base = base
1091 .args()
1092 .iter()
1093 .map(|s| s.to_string_lossy().into_owned())
1094 .filter(|s| !s.starts_with("-O") && !s.starts_with("/O"))
1095 .collect::<Vec<String>>();
1096
1097 // If we're compiling on macOS then we add a few unconditional flags
1098 // indicating that we want libc++ (more filled out than libstdc++) and
1099 // we want to compile for 10.7. This way we can ensure that
1100 // LLVM/etc are all properly compiled.
1101 if target.contains("apple-darwin") {
1102 base.push("-stdlib=libc++".into());
1103 }
1104
1105 // Work around an apparently bad MinGW / GCC optimization,
1106 // See: https://lists.llvm.org/pipermail/cfe-dev/2016-December/051980.html
1107 // See: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=78936
1108 if &*target.triple == "i686-pc-windows-gnu" {
1109 base.push("-fno-omit-frame-pointer".into());
1110 }
1111
1112 if let Some(map_to) = self.debuginfo_map_to(which) {
1113 let map = format!("{}={}", self.src.display(), map_to);
1114 let cc = self.cc(target);
1115 if cc.ends_with("clang") || cc.ends_with("gcc") {
1116 base.push(format!("-fdebug-prefix-map={}", map));
1117 } else if cc.ends_with("clang-cl.exe") {
1118 base.push("-Xclang".into());
1119 base.push(format!("-fdebug-prefix-map={}", map));
1120 }
1121 }
1122 base
1123 }
1124
1125 /// Returns the path to the `ar` archive utility for the target specified.
1126 fn ar(&self, target: TargetSelection) -> Option<&Path> {
1127 self.ar.get(&target).map(|p| &**p)
1128 }
1129
1130 /// Returns the path to the `ranlib` utility for the target specified.
1131 fn ranlib(&self, target: TargetSelection) -> Option<&Path> {
1132 self.ranlib.get(&target).map(|p| &**p)
1133 }
1134
1135 /// Returns the path to the C++ compiler for the target specified.
1136 fn cxx(&self, target: TargetSelection) -> Result<&Path, String> {
1137 match self.cxx.get(&target) {
1138 Some(p) => Ok(p.path()),
1139 None => {
1140 Err(format!("target `{}` is not configured as a host, only as a target", target))
1141 }
1142 }
1143 }
1144
1145 /// Returns the path to the linker for the given target if it needs to be overridden.
1146 fn linker(&self, target: TargetSelection) -> Option<&Path> {
1147 if let Some(linker) = self.config.target_config.get(&target).and_then(|c| c.linker.as_ref())
1148 {
1149 Some(linker)
1150 } else if target.contains("vxworks") {
1151 // need to use the C++ compiler as the linker to resolve the exception functions
1152 // that only exist in C++ libraries
1153 Some(self.cxx[&target].path())
1154 } else if target != self.config.build
1155 && util::use_host_linker(target)
1156 && !target.contains("msvc")
1157 {
1158 Some(self.cc(target))
1159 } else if self.config.use_lld && !self.is_fuse_ld_lld(target) && self.build == target {
1160 Some(&self.initial_lld)
1161 } else {
1162 None
1163 }
1164 }
1165
1166 // LLD is used through `-fuse-ld=lld` rather than directly.
1167 // Only MSVC targets use LLD directly at the moment.
1168 fn is_fuse_ld_lld(&self, target: TargetSelection) -> bool {
1169 self.config.use_lld && !target.contains("msvc")
1170 }
1171
1172 fn lld_flags(&self, target: TargetSelection) -> impl Iterator<Item = String> {
1173 let mut options = [None, None];
1174
1175 if self.config.use_lld {
1176 if self.is_fuse_ld_lld(target) {
1177 options[0] = Some("-Clink-arg=-fuse-ld=lld".to_string());
1178 }
1179
1180 let no_threads = util::lld_flag_no_threads(target.contains("windows"));
1181 options[1] = Some(format!("-Clink-arg=-Wl,{}", no_threads));
1182 }
1183
1184 IntoIterator::into_iter(options).flatten()
1185 }
1186
1187 /// Returns if this target should statically link the C runtime, if specified
1188 fn crt_static(&self, target: TargetSelection) -> Option<bool> {
1189 if target.contains("pc-windows-msvc") {
1190 Some(true)
1191 } else {
1192 self.config.target_config.get(&target).and_then(|t| t.crt_static)
1193 }
1194 }
1195
1196 /// Returns the "musl root" for this `target`, if defined
1197 fn musl_root(&self, target: TargetSelection) -> Option<&Path> {
1198 self.config
1199 .target_config
1200 .get(&target)
1201 .and_then(|t| t.musl_root.as_ref())
1202 .or_else(|| self.config.musl_root.as_ref())
1203 .map(|p| &**p)
1204 }
1205
1206 /// Returns the "musl libdir" for this `target`.
1207 fn musl_libdir(&self, target: TargetSelection) -> Option<PathBuf> {
1208 let t = self.config.target_config.get(&target)?;
1209 if let libdir @ Some(_) = &t.musl_libdir {
1210 return libdir.clone();
1211 }
1212 self.musl_root(target).map(|root| root.join("lib"))
1213 }
1214
1215 /// Returns the sysroot for the wasi target, if defined
1216 fn wasi_root(&self, target: TargetSelection) -> Option<&Path> {
1217 self.config.target_config.get(&target).and_then(|t| t.wasi_root.as_ref()).map(|p| &**p)
1218 }
1219
1220 /// Returns `true` if this is a no-std `target`, if defined
1221 fn no_std(&self, target: TargetSelection) -> Option<bool> {
1222 self.config.target_config.get(&target).map(|t| t.no_std)
1223 }
1224
1225 /// Returns `true` if the target will be tested using the `remote-test-client`
1226 /// and `remote-test-server` binaries.
1227 fn remote_tested(&self, target: TargetSelection) -> bool {
1228 self.qemu_rootfs(target).is_some()
1229 || target.contains("android")
1230 || env::var_os("TEST_DEVICE_ADDR").is_some()
1231 }
1232
1233 /// Returns the root of the "rootfs" image that this target will be using,
1234 /// if one was configured.
1235 ///
1236 /// If `Some` is returned then that means that tests for this target are
1237 /// emulated with QEMU and binaries will need to be shipped to the emulator.
1238 fn qemu_rootfs(&self, target: TargetSelection) -> Option<&Path> {
1239 self.config.target_config.get(&target).and_then(|t| t.qemu_rootfs.as_ref()).map(|p| &**p)
1240 }
1241
1242 /// Path to the python interpreter to use
1243 fn python(&self) -> &Path {
1244 if self.config.build.ends_with("apple-darwin") {
1245 // Force /usr/bin/python3 on macOS for LLDB tests because we're loading the
1246 // LLDB plugin's compiled module which only works with the system python
1247 // (namely not Homebrew-installed python)
1248 Path::new("/usr/bin/python3")
1249 } else {
1250 self.config
1251 .python
1252 .as_ref()
1253 .expect("python is required for running LLDB or rustdoc tests")
1254 }
1255 }
1256
1257 /// Temporary directory that extended error information is emitted to.
1258 fn extended_error_dir(&self) -> PathBuf {
1259 self.out.join("tmp/extended-error-metadata")
1260 }
1261
1262 /// Tests whether the `compiler` compiling for `target` should be forced to
1263 /// use a stage1 compiler instead.
1264 ///
1265 /// Currently, by default, the build system does not perform a "full
1266 /// bootstrap", where we compile the compiler three times.
1267 /// Instead, we compile the compiler two times. The final stage (stage2)
1268 /// just copies the libraries from the previous stage, which is what this
1269 /// method detects.
1270 ///
1271 /// Here we return `true` if:
1272 ///
1273 /// * The build isn't performing a full bootstrap
1274 /// * The `compiler` is in the final stage, 2
1275 /// * We're not cross-compiling, so the artifacts are already available in
1276 /// stage1
1277 ///
1278 /// When all of these conditions are met the build will lift artifacts from
1279 /// the previous stage forward.
1280 fn force_use_stage1(&self, compiler: Compiler, target: TargetSelection) -> bool {
1281 !self.config.full_bootstrap
1282 && compiler.stage >= 2
1283 && (self.hosts.iter().any(|h| *h == target) || target == self.build)
1284 }
1285
1286 /// Given `num` in the form "a.b.c" return a "release string" which
1287 /// describes the release version number.
1288 ///
1289 /// For example on nightly this returns "a.b.c-nightly", on beta it returns
1290 /// "a.b.c-beta.1" and on stable it just returns "a.b.c".
1291 fn release(&self, num: &str) -> String {
1292 match &self.config.channel[..] {
1293 "stable" => num.to_string(),
1294 "beta" => {
1295 if self.rust_info().is_managed_git_subrepository() && !self.config.ignore_git {
1296 format!("{}-beta.{}", num, self.beta_prerelease_version())
1297 } else {
1298 format!("{}-beta", num)
1299 }
1300 }
1301 "nightly" => format!("{}-nightly", num),
1302 _ => format!("{}-dev", num),
1303 }
1304 }
1305
1306 fn beta_prerelease_version(&self) -> u32 {
1307 if let Some(s) = self.prerelease_version.get() {
1308 return s;
1309 }
1310
1311 // Figure out how many merge commits happened since we branched off master.
1312 // That's our beta number!
1313 // (Note that we use a `..` range, not the `...` symmetric difference.)
1314 let count =
1315 output(self.config.git().arg("rev-list").arg("--count").arg("--merges").arg(format!(
1316 "refs/remotes/origin/{}..HEAD",
1317 self.config.stage0_metadata.config.nightly_branch
1318 )));
1319 let n = count.trim().parse().unwrap();
1320 self.prerelease_version.set(Some(n));
1321 n
1322 }
1323
1324 /// Returns the value of `release` above for Rust itself.
1325 fn rust_release(&self) -> String {
1326 self.release(&self.version)
1327 }
1328
1329 /// Returns the "package version" for a component given the `num` release
1330 /// number.
1331 ///
1332 /// The package version is typically what shows up in the names of tarballs.
1333 /// For channels like beta/nightly it's just the channel name, otherwise
1334 /// it's the `num` provided.
1335 fn package_vers(&self, num: &str) -> String {
1336 match &self.config.channel[..] {
1337 "stable" => num.to_string(),
1338 "beta" => "beta".to_string(),
1339 "nightly" => "nightly".to_string(),
1340 _ => format!("{}-dev", num),
1341 }
1342 }
1343
1344 /// Returns the value of `package_vers` above for Rust itself.
1345 fn rust_package_vers(&self) -> String {
1346 self.package_vers(&self.version)
1347 }
1348
1349 /// Returns the `version` string associated with this compiler for Rust
1350 /// itself.
1351 ///
1352 /// Note that this is a descriptive string which includes the commit date,
1353 /// sha, version, etc.
1354 fn rust_version(&self) -> String {
1355 let mut version = self.rust_info().version(self, &self.version);
1356 if let Some(ref s) = self.config.description {
1357 version.push_str(" (");
1358 version.push_str(s);
1359 version.push(')');
1360 }
1361 version
1362 }
1363
1364 /// Returns the full commit hash.
1365 fn rust_sha(&self) -> Option<&str> {
1366 self.rust_info().sha()
1367 }
1368
1369 /// Returns the `a.b.c` version that the given package is at.
1370 fn release_num(&self, package: &str) -> String {
1371 let toml_file_name = self.src.join(&format!("src/tools/{}/Cargo.toml", package));
1372 let toml = t!(fs::read_to_string(&toml_file_name));
1373 for line in toml.lines() {
1374 if let Some(stripped) =
1375 line.strip_prefix("version = \"").and_then(|s| s.strip_suffix("\""))
1376 {
1377 return stripped.to_owned();
1378 }
1379 }
1380
1381 panic!("failed to find version in {}'s Cargo.toml", package)
1382 }
1383
1384 /// Returns `true` if unstable features should be enabled for the compiler
1385 /// we're building.
1386 fn unstable_features(&self) -> bool {
1387 match &self.config.channel[..] {
1388 "stable" | "beta" => false,
1389 "nightly" | _ => true,
1390 }
1391 }
1392
1393 /// Returns a Vec of all the dependencies of the given root crate,
1394 /// including transitive dependencies and the root itself. Only includes
1395 /// "local" crates (those in the local source tree, not from a registry).
1396 fn in_tree_crates(&self, root: &str, target: Option<TargetSelection>) -> Vec<&Crate> {
1397 let mut ret = Vec::new();
1398 let mut list = vec![INTERNER.intern_str(root)];
1399 let mut visited = HashSet::new();
1400 while let Some(krate) = list.pop() {
1401 let krate = self
1402 .crates
1403 .get(&krate)
1404 .unwrap_or_else(|| panic!("metadata missing for {krate}: {:?}", self.crates));
1405 ret.push(krate);
1406 for dep in &krate.deps {
1407 if !self.crates.contains_key(dep) {
1408 // Ignore non-workspace members.
1409 continue;
1410 }
1411 // Don't include optional deps if their features are not
1412 // enabled. Ideally this would be computed from `cargo
1413 // metadata --features …`, but that is somewhat slow. In
1414 // the future, we may want to consider just filtering all
1415 // build and dev dependencies in metadata::build.
1416 if visited.insert(dep)
1417 && (dep != "profiler_builtins"
1418 || target
1419 .map(|t| self.config.profiler_enabled(t))
1420 .unwrap_or_else(|| self.config.any_profiler_enabled()))
1421 && (dep != "rustc_codegen_llvm" || self.config.llvm_enabled())
1422 {
1423 list.push(*dep);
1424 }
1425 }
1426 }
1427 ret
1428 }
1429
1430 fn read_stamp_file(&self, stamp: &Path) -> Vec<(PathBuf, DependencyType)> {
1431 if self.config.dry_run() {
1432 return Vec::new();
1433 }
1434
1435 let mut paths = Vec::new();
1436 let contents = t!(fs::read(stamp), &stamp);
1437 // This is the method we use for extracting paths from the stamp file passed to us. See
1438 // run_cargo for more information (in compile.rs).
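// As a sketch, the stamp contents look like `h<path>\0t<path>\0...`: entries are
// NUL-separated, the first byte encodes the dependency type and the rest is a UTF-8 path.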
1439 for part in contents.split(|b| *b == 0) {
1440 if part.is_empty() {
1441 continue;
1442 }
1443 let dependency_type = match part[0] as char {
1444 'h' => DependencyType::Host,
1445 's' => DependencyType::TargetSelfContained,
1446 't' => DependencyType::Target,
1447 _ => unreachable!(),
1448 };
1449 let path = PathBuf::from(t!(str::from_utf8(&part[1..])));
1450 paths.push((path, dependency_type));
1451 }
1452 paths
1453 }
1454
1455 /// Copies a file from `src` to `dst`
1456 pub fn copy(&self, src: &Path, dst: &Path) {
1457 self.copy_internal(src, dst, false);
1458 }
1459
1460 fn copy_internal(&self, src: &Path, dst: &Path, dereference_symlinks: bool) {
1461 if self.config.dry_run() {
1462 return;
1463 }
1464 self.verbose_than(1, &format!("Copy {:?} to {:?}", src, dst));
1465 if src == dst {
1466 return;
1467 }
1468 let _ = fs::remove_file(&dst);
1469 let metadata = t!(src.symlink_metadata());
1470 let mut src = src.to_path_buf();
1471 if metadata.file_type().is_symlink() {
1472 if dereference_symlinks {
1473 src = t!(fs::canonicalize(src));
1474 } else {
1475 let link = t!(fs::read_link(src));
1476 t!(self.symlink_file(link, dst));
1477 return;
1478 }
1479 }
1480 if let Ok(()) = fs::hard_link(&src, dst) {
1481 // Attempt to "easy copy" by creating a hard link
1482 // (symlinks don't work on windows), but if that fails
1483 // just fall back to a slow `copy` operation.
1484 } else {
1485 if let Err(e) = fs::copy(&src, dst) {
1486 panic!("failed to copy `{}` to `{}`: {}", src.display(), dst.display(), e)
1487 }
1488 t!(fs::set_permissions(dst, metadata.permissions()));
1489 let atime = FileTime::from_last_access_time(&metadata);
1490 let mtime = FileTime::from_last_modification_time(&metadata);
1491 t!(filetime::set_file_times(dst, atime, mtime));
1492 }
1493 }
1494
1495 /// Copies the `src` directory recursively to `dst`. Both are assumed to exist
1496 /// when this function is called.
1497 pub fn cp_r(&self, src: &Path, dst: &Path) {
1498 if self.config.dry_run() {
1499 return;
1500 }
1501 for f in self.read_dir(src) {
1502 let path = f.path();
1503 let name = path.file_name().unwrap();
1504 let dst = dst.join(name);
1505 if t!(f.file_type()).is_dir() {
1506 t!(fs::create_dir_all(&dst));
1507 self.cp_r(&path, &dst);
1508 } else {
1509 let _ = fs::remove_file(&dst);
1510 self.copy(&path, &dst);
1511 }
1512 }
1513 }
1514
1515 /// Copies the `src` directory recursively to `dst`. Both are assumed to exist
1516 /// when this function is called. Unwanted files or directories can be skipped
1517 /// by returning `false` from the filter function.
1518 pub fn cp_filtered(&self, src: &Path, dst: &Path, filter: &dyn Fn(&Path) -> bool) {
1519 // Immediately recurse with an empty relative path
1520 self.recurse_(src, dst, Path::new(""), filter)
1521 }
1522
1523 // Inner function does the actual work
1524 fn recurse_(&self, src: &Path, dst: &Path, relative: &Path, filter: &dyn Fn(&Path) -> bool) {
1525 for f in self.read_dir(src) {
1526 let path = f.path();
1527 let name = path.file_name().unwrap();
1528 let dst = dst.join(name);
1529 let relative = relative.join(name);
1530 // Only copy file or directory if the filter function returns true
1531 if filter(&relative) {
1532 if t!(f.file_type()).is_dir() {
1533 let _ = fs::remove_dir_all(&dst);
1534 self.create_dir(&dst);
1535 self.recurse_(&path, &dst, &relative, filter);
1536 } else {
1537 let _ = fs::remove_file(&dst);
1538 self.copy(&path, &dst);
1539 }
1540 }
1541 }
1542 }
1543
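/// Copies `src` into `dest_folder`, keeping the original file name.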
1544 fn copy_to_folder(&self, src: &Path, dest_folder: &Path) {
1545 let file_name = src.file_name().unwrap();
1546 let dest = dest_folder.join(file_name);
1547 self.copy(src, &dest);
1548 }
1549
1550 fn install(&self, src: &Path, dstdir: &Path, perms: u32) {
1551 if self.config.dry_run() {
1552 return;
1553 }
1554 let dst = dstdir.join(src.file_name().unwrap());
1555 self.verbose_than(1, &format!("Install {:?} to {:?}", src, dst));
1556 t!(fs::create_dir_all(dstdir));
1557 if !src.exists() {
1558 panic!("Error: File \"{}\" not found!", src.display());
1559 }
1560 self.copy_internal(src, &dst, true);
1561 chmod(&dst, perms);
1562 }
1563
1564 fn read(&self, path: &Path) -> String {
1565 if self.config.dry_run() {
1566 return String::new();
1567 }
1568 t!(fs::read_to_string(path))
1569 }
1570
1571 fn create_dir(&self, dir: &Path) {
1572 if self.config.dry_run() {
1573 return;
1574 }
1575 t!(fs::create_dir_all(dir))
1576 }
1577
1578 fn remove_dir(&self, dir: &Path) {
1579 if self.config.dry_run() {
1580 return;
1581 }
1582 t!(fs::remove_dir_all(dir))
1583 }
1584
1585 fn read_dir(&self, dir: &Path) -> impl Iterator<Item = fs::DirEntry> {
1586 let iter = match fs::read_dir(dir) {
1587 Ok(v) => v,
1588 Err(_) if self.config.dry_run() => return vec![].into_iter(),
1589 Err(err) => panic!("could not read dir {:?}: {:?}", dir, err),
1590 };
1591 iter.map(|e| t!(e)).collect::<Vec<_>>().into_iter()
1592 }
1593
1594 fn symlink_file<P: AsRef<Path>, Q: AsRef<Path>>(&self, src: P, link: Q) -> io::Result<()> {
1595 #[cfg(unix)]
1596 use std::os::unix::fs::symlink as symlink_file;
1597 #[cfg(windows)]
1598 use std::os::windows::fs::symlink_file;
1599 if !self.config.dry_run() { symlink_file(src.as_ref(), link.as_ref()) } else { Ok(()) }
1600 }
1601
1602 /// Returns if config.ninja is enabled, and checks for ninja existence,
1603 /// exiting with a nicer error message if not.
1604 fn ninja(&self) -> bool {
1605 let mut cmd_finder = crate::sanity::Finder::new();
1606
1607 if self.config.ninja_in_file {
1608 // Some Linux distros rename `ninja` to `ninja-build`.
1609 // CMake can work with either binary name.
1610 if cmd_finder.maybe_have("ninja-build").is_none()
1611 && cmd_finder.maybe_have("ninja").is_none()
1612 {
1613 eprintln!(
1614 "
1615 Couldn't find required command: ninja (or ninja-build)
1616
1617 You should install ninja as described at
1618 <https://github.com/ninja-build/ninja/wiki/Pre-built-Ninja-packages>,
1619 or set `ninja = false` in the `[llvm]` section of `config.toml`.
1620 Alternatively, set `download-ci-llvm = true` in that `[llvm]` section
1621 to download LLVM rather than building it.
1622 "
1623 );
1624 detail_exit(1);
1625 }
1626 }
1627
1628 // If ninja isn't enabled but we're building for MSVC then we try
1629 // doubly hard to enable it. It was realized in #43767 that the msbuild
1630 // CMake generator for MSVC doesn't respect configuration options like
1631 // disabling LLVM assertions, which can often be quite important!
1632 //
1633 // In these cases we automatically enable Ninja if we find it in the
1634 // environment.
1635 if !self.config.ninja_in_file && self.config.build.contains("msvc") {
1636 if cmd_finder.maybe_have("ninja").is_some() {
1637 return true;
1638 }
1639 }
1640
1641 self.config.ninja_in_file
1642 }
1643 }
1644
1645 #[cfg(unix)]
1646 fn chmod(path: &Path, perms: u32) {
1647 use std::os::unix::fs::*;
1648 t!(fs::set_permissions(path, fs::Permissions::from_mode(perms)));
1649 }
1650 #[cfg(windows)]
1651 fn chmod(_path: &Path, _perms: u32) {}
1652
1653 /// Exits the process with the provided status code.
1654 /// When running under tests, it panics instead (so the failure is reported with Rust's default error code, 101).
1655 fn detail_exit(code: i32) -> ! {
1656 // if running tests, panic with the provided status code
1657 if cfg!(test) {
1658 panic!("status code: {}", code);
1659 } else {
1660 // otherwise, exit with the provided status code
1661 std::process::exit(code);
1662 }
1663 }
1664
1665 impl Compiler {
1666 pub fn with_stage(mut self, stage: u32) -> Compiler {
1667 self.stage = stage;
1668 self
1669 }
1670
1671 /// Returns `true` if this is a snapshot compiler for `build`'s configuration
1672 pub fn is_snapshot(&self, build: &Build) -> bool {
1673 self.stage == 0 && self.host == build.build
1674 }
1675
1676 /// Returns if this compiler should be treated as a final stage one in the
1677 /// current build session.
1678 /// This takes into account whether we're performing a full bootstrap or
1679 /// not; don't directly compare the stage with `2`!
1680 pub fn is_final_stage(&self, build: &Build) -> bool {
1681 let final_stage = if build.config.full_bootstrap { 2 } else { 1 };
1682 self.stage >= final_stage
1683 }
1684 }
1685
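/// Converts a name to its environment-variable form: dashes become underscores and
/// characters are uppercased, e.g. "rust-demangler" becomes "RUST_DEMANGLER".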
1686 fn envify(s: &str) -> String {
1687 s.chars()
1688 .map(|c| match c {
1689 '-' => '_',
1690 c => c,
1691 })
1692 .flat_map(|c| c.to_uppercase())
1693 .collect()
1694 }