1 //! Implementation of rustbuild, the Rust build system.
3 //! This module, and its descendants, are the implementation of the Rust build
4 //! system. Most of this build system is backed by Cargo but the outer layer
5 //! here serves as the ability to orchestrate calling Cargo, sequencing Cargo
6 //! builds, building artifacts like LLVM, etc. The goals of rustbuild are:
8 //! * To be an easily understandable, easily extensible, and maintainable build
10 //! * Leverage standard tools in the Rust ecosystem to build the compiler, aka
11 //! crates.io and Cargo.
12 //! * A standard interface to build across all platforms, including MSVC
16 //! The build system defers most of the complicated logic managing invocations
17 //! of rustc and rustdoc to Cargo itself. However, moving through various stages
18 //! and copying artifacts is still necessary for it to do. Each time rustbuild
19 //! is invoked, it will iterate through the list of predefined steps and execute
20 //! each serially in turn if it matches the paths passed or is a default rule.
21 //! For each step rustbuild relies on the step internally being incremental and
22 //! parallel. Note, though, that the `-j` parameter to rustbuild gets forwarded
23 //! to appropriate test harnesses and such.
25 //! Most of the "meaty" steps that matter are backed by Cargo, which does indeed
26 //! have its own parallelism and incremental management. Later steps, like
27 //! tests, aren't incremental and simply run the entire suite currently.
28 //! However, compiletest itself tries to avoid running tests when the artifacts
29 //! that are involved (mainly the compiler) haven't changed.
31 //! When you execute `x.py build`, the steps executed are:
33 //! * First, the python script is run. This will automatically download the
34 //! stage0 rustc and cargo according to `src/stage0.json`, or use the cached
35 //! versions if they're available. These are then used to compile rustbuild
36 //! itself (using Cargo). Finally, control is then transferred to rustbuild.
38 //! * Rustbuild takes over, performs sanity checks, probes the environment,
39 //! reads configuration, and starts executing steps as it reads the command
40 //! line arguments (paths) or going through the default rules.
42 //! The build output will be something like the following:
44 //! Building stage0 std artifacts
45 //! Copying stage0 std
46 //! Building stage0 test artifacts
47 //! Copying stage0 test
48 //! Building stage0 compiler artifacts
49 //! Copying stage0 rustc
50 //! Assembling stage1 compiler
51 //! Building stage1 std artifacts
52 //! Copying stage1 std
53 //! Building stage1 test artifacts
54 //! Copying stage1 test
55 //! Building stage1 compiler artifacts
56 //! Copying stage1 rustc
57 //! Assembling stage2 compiler
58 //! Uplifting stage1 std
59 //! Uplifting stage1 test
60 //! Uplifting stage1 rustc
//! Let's dissect that a little:
64 //! ## Building stage0 {std,test,compiler} artifacts
66 //! These steps use the provided (downloaded, usually) compiler to compile the
67 //! local Rust source into libraries we can use.
69 //! ## Copying stage0 {std,test,rustc}
71 //! This copies the build output from Cargo into
72 //! `build/$HOST/stage0-sysroot/lib/rustlib/$ARCH/lib`. FIXME: this step's
73 //! documentation should be expanded -- the information already here may be
76 //! ## Assembling stage1 compiler
78 //! This copies the libraries we built in "building stage0 ... artifacts" into
79 //! the stage1 compiler's lib directory. These are the host libraries that the
80 //! compiler itself uses to run. These aren't actually used by artifacts the new
81 //! compiler generates. This step also copies the rustc and rustdoc binaries we
82 //! generated into build/$HOST/stage/bin.
84 //! The stage1/bin/rustc is a fully functional compiler, but it doesn't yet have
85 //! any libraries to link built binaries or libraries to. The next 3 steps will
86 //! provide those libraries for it; they are mostly equivalent to constructing
87 //! the stage1/bin compiler so we don't go through them individually.
89 //! ## Uplifting stage1 {std,test,rustc}
91 //! This step copies the libraries from the stage1 compiler sysroot into the
92 //! stage2 compiler. This is done to avoid rebuilding the compiler; libraries
93 //! we'd build in this step should be identical (in function, if not necessarily
94 //! identical on disk) so there's no need to recompile the compiler again. Note
95 //! that if you want to, you can enable the full-bootstrap option to change this
98 //! Each step is driven by a separate Cargo project and rustbuild orchestrates
99 //! copying files between steps and otherwise preparing for Cargo to run.
101 //! ## Further information
103 //! More documentation can be found in each respective module below, and you can
104 //! also check out the `src/bootstrap/README.md` file for more information.
106 use std
::cell
::{Cell, RefCell}
;
107 use std
::collections
::{HashMap, HashSet}
;
109 use std
::fs
::{self, File}
;
111 use std
::path
::{Path, PathBuf}
;
112 use std
::process
::Command
;
116 use filetime
::FileTime
;
117 use once_cell
::sync
::OnceCell
;
119 use crate::builder
::Kind
;
120 use crate::config
::{LlvmLibunwind, TargetSelection}
;
122 check_run
, exe
, libdir
, mtime
, output
, run
, run_suppressed
, try_run
, try_run_suppressed
, CiEnv
,
150 #[cfg(feature = "build-metrics")]
156 #[cfg(all(unix, not(target_os = "haiku")))]
158 pub unsafe fn setup(build
: &mut crate::Build
) {
159 if build
.config
.low_priority
{
160 libc
::setpriority(libc
::PRIO_PGRP
as _
, 0, 10);
165 #[cfg(any(target_os = "haiku", target_os = "hermit", not(any(unix, windows))))]
167 pub unsafe fn setup(_build
: &mut crate::Build
) {}
170 pub use crate::builder
::PathSet
;
171 use crate::cache
::{Interned, INTERNER}
;
172 pub use crate::config
::Config
;
173 pub use crate::flags
::Subcommand
;
/// LLVM tool binaries referenced by the build, each annotated with why it is needed.
const LLVM_TOOLS: &[&str] = &[
    "llvm-cov",      // used to generate coverage report
    "llvm-nm",       // used to inspect binaries; it shows symbol names, their sizes and visibility
    "llvm-objcopy",  // used to transform ELFs into binary format which flashing tools consume
    "llvm-objdump",  // used to disassemble programs
    "llvm-profdata", // used to inspect and merge files generated by profiles
    "llvm-readobj",  // used to get information from ELFs/objects that the other tools don't provide
    "llvm-size",     // used to print the size of the linker sections of a program
    "llvm-strip",    // used to discard symbols from binary files to reduce their size
    "llvm-ar",       // used for creating and modifying archive files
    "llvm-as",       // used to convert LLVM assembly to LLVM bitcode
    "llvm-dis",      // used to disassemble LLVM bitcode
    "llc",           // used to compile LLVM bytecode
    "opt",           // used to optimize LLVM bytecode
];
/// File names of the LLD linker binary, one for each LLD "flavor"
/// (GNU ld-style, Mach-O ld64-style, MSVC link-style, and wasm-ld).
const LLD_FILE_NAMES: &[&str] = &["ld.lld", "ld64.lld", "lld-link", "wasm-ld"];
/// Bootstrap's own version number.
/// NOTE(review): presumably compared elsewhere to detect stale configuration —
/// confirm against the call sites before relying on the exact semantics.
pub const VERSION: usize = 2;
196 /// Extra --check-cfg to add when building
197 /// (Mode restriction, config name, config values (if any))
198 const EXTRA_CHECK_CFGS
: &[(Option
<Mode
>, &'
static str, Option
<&[&'
static str]>)] = &[
199 (None
, "bootstrap", None
),
200 (Some(Mode
::Rustc
), "parallel_compiler", None
),
201 (Some(Mode
::ToolRustc
), "parallel_compiler", None
),
202 (Some(Mode
::Codegen
), "parallel_compiler", None
),
203 (Some(Mode
::Std
), "stdarch_intel_sde", None
),
204 (Some(Mode
::Std
), "no_fp_fmt_parse", None
),
205 (Some(Mode
::Std
), "no_global_oom_handling", None
),
206 (Some(Mode
::Std
), "no_rc", None
),
207 (Some(Mode
::Std
), "no_sync", None
),
208 (Some(Mode
::Std
), "freebsd12", None
),
209 (Some(Mode
::Std
), "backtrace_in_libstd", None
),
210 /* Extra values not defined in the built-in targets yet, but used in std */
211 (Some(Mode
::Std
), "target_env", Some(&["libnx"])),
212 (Some(Mode
::Std
), "target_os", Some(&["watchos"])),
216 Some(&["asmjs", "spirv", "nvptx", "nvptx64", "le32", "xtensa"]),
218 /* Extra names used by dependencies */
// FIXME: Used by rustfmt in their test but is invalid (neither cargo nor bootstrap ever set
// this config); should probably be removed or use an allow attribute.
221 (Some(Mode
::ToolRustc
), "release", None
),
// FIXME: Used by stdarch in their test; should use an allow attribute instead.
223 (Some(Mode
::Std
), "dont_compile_me", None
),
224 // FIXME: Used by serde_json, but we should not be triggering on external dependencies.
225 (Some(Mode
::Rustc
), "no_btreemap_remove_entry", None
),
226 (Some(Mode
::ToolRustc
), "no_btreemap_remove_entry", None
),
227 // FIXME: Used by crossbeam-utils, but we should not be triggering on external dependencies.
228 (Some(Mode
::Rustc
), "crossbeam_loom", None
),
229 (Some(Mode
::ToolRustc
), "crossbeam_loom", None
),
230 // FIXME: Used by proc-macro2, but we should not be triggering on external dependencies.
231 (Some(Mode
::Rustc
), "span_locations", None
),
232 (Some(Mode
::ToolRustc
), "span_locations", None
),
233 // Can be passed in RUSTFLAGS to prevent direct syscalls in rustix.
234 (None
, "rustix_use_libc", None
),
237 /// A structure representing a Rust compiler.
239 /// Each compiler has a `stage` that it is associated with and a `host` that
240 /// corresponds to the platform the compiler runs on. This structure is used as
241 /// a parameter to many methods below.
242 #[derive(Eq, PartialOrd, Ord, PartialEq, Clone, Copy, Hash, Debug)]
243 pub struct Compiler
{
245 host
: TargetSelection
,
248 #[derive(PartialEq, Eq, Copy, Clone, Debug)]
250 /// Run normal tests and doc tests (default).
252 /// Do not run any doc tests.
254 /// Only run doc tests.
263 /// Global configuration for the build system.
265 /// This structure transitively contains all configuration for the build system.
266 /// All filesystem-encoded configuration is in `config`, all flags are in
267 /// `flags`, and then parsed or probed information is listed in the keys below.
269 /// This structure is a parameter of almost all methods in the build system,
270 /// although most functions are implemented as free functions rather than
271 /// methods specifically on this structure itself (to make it easier to
274 /// User-specified configuration from `config.toml`.
277 // Version information
280 // Properties derived from the above configuration
283 bootstrap_out
: PathBuf
,
284 rust_info
: channel
::GitInfo
,
285 cargo_info
: channel
::GitInfo
,
286 rust_analyzer_info
: channel
::GitInfo
,
287 clippy_info
: channel
::GitInfo
,
288 miri_info
: channel
::GitInfo
,
289 rustfmt_info
: channel
::GitInfo
,
290 in_tree_llvm_info
: channel
::GitInfo
,
296 // Targets for which to build
297 build
: TargetSelection
,
298 hosts
: Vec
<TargetSelection
>,
299 targets
: Vec
<TargetSelection
>,
301 initial_rustc
: PathBuf
,
302 initial_cargo
: PathBuf
,
303 initial_lld
: PathBuf
,
304 initial_libdir
: PathBuf
,
306 // Runtime state filled in later on
307 // C/C++ compilers and archiver for all targets
308 cc
: HashMap
<TargetSelection
, cc
::Tool
>,
309 cxx
: HashMap
<TargetSelection
, cc
::Tool
>,
310 ar
: HashMap
<TargetSelection
, PathBuf
>,
311 ranlib
: HashMap
<TargetSelection
, PathBuf
>,
313 // allow bidirectional lookups: both name -> path and path -> name
314 crates
: HashMap
<Interned
<String
>, Crate
>,
315 crate_paths
: HashMap
<PathBuf
, Interned
<String
>>,
318 delayed_failures
: RefCell
<Vec
<String
>>,
319 prerelease_version
: Cell
<Option
<u32>>,
321 RefCell
<HashMap
<TargetSelection
, HashMap
<String
, (&'
static str, PathBuf
, Vec
<String
>)>>>,
323 #[cfg(feature = "build-metrics")]
324 metrics
: metrics
::BuildMetrics
,
329 name
: Interned
<String
>,
330 deps
: HashSet
<Interned
<String
>>,
335 fn local_path(&self, build
: &Build
) -> PathBuf
{
336 self.path
.strip_prefix(&build
.config
.src
).unwrap().into()
340 /// When building Rust various objects are handled differently.
341 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
342 pub enum DependencyType
{
343 /// Libraries originating from proc-macros.
345 /// Typical Rust libraries.
347 /// Non Rust libraries and objects shipped to ease usage of certain targets.
351 /// The various "modes" of invoking Cargo.
353 /// These entries currently correspond to the various output directories of the
354 /// build system, with each mod generating output in a different directory.
355 #[derive(Debug, Hash, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
357 /// Build the standard library, placing output in the "stageN-std" directory.
360 /// Build librustc, and compiler libraries, placing output in the "stageN-rustc" directory.
363 /// Build a codegen backend for rustc, placing the output in the "stageN-codegen" directory.
366 /// Build a tool, placing output in the "stage0-bootstrap-tools"
367 /// directory. This is for miscellaneous sets of tools that are built
368 /// using the bootstrap stage0 compiler in its entirety (target libraries
369 /// and all). Typically these tools compile with stable Rust.
372 /// Build a tool which uses the locally built std, placing output in the
373 /// "stageN-tools" directory. Its usage is quite rare, mainly used by
374 /// compiletest which needs libtest.
377 /// Build a tool which uses the locally built rustc and the target std,
378 /// placing the output in the "stageN-tools" directory. This is used for
379 /// anything that needs a fully functional rustc, such as rustdoc, clippy,
380 /// cargo, rls, rustfmt, miri, etc.
385 pub fn is_tool(&self) -> bool
{
386 matches
!(self, Mode
::ToolBootstrap
| Mode
::ToolRustc
| Mode
::ToolStd
)
389 pub fn must_support_dlopen(&self) -> bool
{
390 matches
!(self, Mode
::Std
| Mode
::Codegen
)
400 /// Creates a new set of build configuration from the `flags` on the command
401 /// line and the filesystem `config`.
403 /// By default all build output will be placed in the current directory.
404 pub fn new(mut config
: Config
) -> Build
{
405 let src
= config
.src
.clone();
406 let out
= config
.out
.clone();
409 // keep this consistent with the equivalent check in x.py:
410 // https://github.com/rust-lang/rust/blob/a8a33cf27166d3eabaffc58ed3799e054af3b0c6/src/bootstrap/bootstrap.py#L796-L797
411 let is_sudo
= match env
::var_os("SUDO_USER") {
412 Some(_sudo_user
) => {
413 let uid
= unsafe { libc::getuid() }
;
421 let ignore_git
= config
.ignore_git
;
422 let rust_info
= channel
::GitInfo
::new(ignore_git
, &src
);
423 let cargo_info
= channel
::GitInfo
::new(ignore_git
, &src
.join("src/tools/cargo"));
424 let rust_analyzer_info
=
425 channel
::GitInfo
::new(ignore_git
, &src
.join("src/tools/rust-analyzer"));
426 let clippy_info
= channel
::GitInfo
::new(ignore_git
, &src
.join("src/tools/clippy"));
427 let miri_info
= channel
::GitInfo
::new(ignore_git
, &src
.join("src/tools/miri"));
428 let rustfmt_info
= channel
::GitInfo
::new(ignore_git
, &src
.join("src/tools/rustfmt"));
430 // we always try to use git for LLVM builds
431 let in_tree_llvm_info
= channel
::GitInfo
::new(false, &src
.join("src/llvm-project"));
433 let initial_target_libdir_str
= if config
.dry_run
{
434 "/dummy/lib/path/to/lib/".to_string()
437 Command
::new(&config
.initial_rustc
)
439 .arg(config
.build
.rustc_target_arg())
441 .arg("target-libdir"),
444 let initial_target_dir
= Path
::new(&initial_target_libdir_str
).parent().unwrap();
445 let initial_lld
= initial_target_dir
.join("bin").join("rust-lld");
447 let initial_sysroot
= if config
.dry_run
{
450 output(Command
::new(&config
.initial_rustc
).arg("--print").arg("sysroot"))
452 let initial_libdir
= initial_target_dir
457 .strip_prefix(initial_sysroot
.trim())
461 let version
= std
::fs
::read_to_string(src
.join("src").join("version"))
462 .expect("failed to read src/version");
463 let version
= version
.trim();
465 let bootstrap_out
= std
::env
::current_exe()
466 .expect("could not determine path to running process")
470 if !bootstrap_out
.join(exe("rustc", config
.build
)).exists() && !cfg
!(test
) {
471 // this restriction can be lifted whenever https://github.com/rust-lang/rfcs/pull/3028 is implemented
473 "`rustc` not found in {}, run `cargo build --bins` before `cargo run`",
474 bootstrap_out
.display()
478 if rust_info
.is_from_tarball() && config
.description
.is_none() {
479 config
.description
= Some("built from a source tarball".to_owned());
482 let mut build
= Build
{
483 initial_rustc
: config
.initial_rustc
.clone(),
484 initial_cargo
: config
.initial_cargo
.clone(),
487 local_rebuild
: config
.local_rebuild
,
488 fail_fast
: config
.cmd
.fail_fast(),
489 doc_tests
: config
.cmd
.doc_tests(),
490 verbosity
: config
.verbose
,
493 hosts
: config
.hosts
.clone(),
494 targets
: config
.targets
.clone(),
497 version
: version
.to_string(),
512 ranlib
: HashMap
::new(),
513 crates
: HashMap
::new(),
514 crate_paths
: HashMap
::new(),
516 ci_env
: CiEnv
::current(),
517 delayed_failures
: RefCell
::new(Vec
::new()),
518 prerelease_version
: Cell
::new(None
),
519 tool_artifacts
: Default
::default(),
521 #[cfg(feature = "build-metrics")]
522 metrics
: metrics
::BuildMetrics
::init(),
525 build
.verbose("finding compilers");
526 cc_detect
::find(&mut build
);
527 // When running `setup`, the profile is about to change, so any requirements we have now may
528 // be different on the next invocation. Don't check for them until the next time x.py is
529 // run. This is ok because `setup` never runs any build commands, so it won't fail if commands are missing.
530 if !matches
!(build
.config
.cmd
, Subcommand
::Setup { .. }
) {
531 build
.verbose("running sanity check");
532 sanity
::check(&mut build
);
535 // If local-rust is the same major.minor as the current version, then force a
537 let local_version_verbose
=
538 output(Command
::new(&build
.initial_rustc
).arg("--version").arg("--verbose"));
539 let local_release
= local_version_verbose
541 .filter_map(|x
| x
.strip_prefix("release:"))
545 if local_release
.split('
.'
).take(2).eq(version
.split('
.'
).take(2)) {
546 build
.verbose(&format
!("auto-detected local-rebuild {}", local_release
));
547 build
.local_rebuild
= true;
550 // Make sure we update these before gathering metadata so we don't get an error about missing
552 let rust_submodules
=
553 ["src/tools/rust-installer", "src/tools/cargo", "library/backtrace", "library/stdarch"];
554 for s
in rust_submodules
{
555 build
.update_submodule(Path
::new(s
));
558 build
.verbose("learning about cargo");
559 metadata
::build(&mut build
);
564 // modified from `check_submodule` and `update_submodule` in bootstrap.py
565 /// Given a path to the directory of a submodule, update it.
567 /// `relative_path` should be relative to the root of the git repository, not an absolute path.
568 pub(crate) fn update_submodule(&self, relative_path
: &Path
) {
569 fn dir_is_empty(dir
: &Path
) -> bool
{
570 t
!(std
::fs
::read_dir(dir
)).next().is_none()
573 if !self.config
.submodules(&self.rust_info
) {
577 let absolute_path
= self.config
.src
.join(relative_path
);
579 // NOTE: The check for the empty directory is here because when running x.py the first time,
580 // the submodule won't be checked out. Check it out now so we can build it.
581 if !channel
::GitInfo
::new(false, &absolute_path
).is_managed_git_subrepository()
582 && !dir_is_empty(&absolute_path
)
588 let checked_out_hash
=
589 output(Command
::new("git").args(&["rev-parse", "HEAD"]).current_dir(&absolute_path
));
591 let recorded
= output(
593 .args(&["ls-tree", "HEAD"])
595 .current_dir(&self.config
.src
),
597 let actual_hash
= recorded
600 .unwrap_or_else(|| panic
!("unexpected output `{}`", recorded
));
603 if actual_hash
== checked_out_hash
.trim_end() {
604 // already checked out
608 println
!("Updating submodule {}", relative_path
.display());
611 .args(&["submodule", "-q", "sync"])
613 .current_dir(&self.config
.src
),
616 // Try passing `--progress` to start, then run git again without if that fails.
617 let update
= |progress
: bool
| {
618 let mut git
= Command
::new("git");
619 git
.args(&["submodule", "update", "--init", "--recursive", "--depth=1"]);
621 git
.arg("--progress");
623 git
.arg(relative_path
).current_dir(&self.config
.src
);
626 // NOTE: doesn't use `try_run` because this shouldn't print an error if it fails.
627 if !update(true).status().map_or(false, |status
| status
.success()) {
628 self.run(&mut update(false));
631 self.run(Command
::new("git").args(&["reset", "-q", "--hard"]).current_dir(&absolute_path
));
632 self.run(Command
::new("git").args(&["clean", "-qdfx"]).current_dir(absolute_path
));
635 /// If any submodule has been initialized already, sync it unconditionally.
636 /// This avoids contributors checking in a submodule change by accident.
637 pub fn maybe_update_submodules(&self) {
638 // Avoid running git when there isn't a git checkout.
639 if !self.config
.submodules(&self.rust_info
) {
645 .args(&["config", "--file"])
646 .arg(&self.config
.src
.join(".gitmodules"))
647 .args(&["--get-regexp", "path"]),
649 for line
in output
.lines() {
650 // Look for `submodule.$name.path = $path`
651 // Sample output: `submodule.src/rust-installer.path src/tools/rust-installer`
652 let submodule
= Path
::new(line
.splitn(2, ' '
).nth(1).unwrap());
653 // Don't update the submodule unless it's already been cloned.
654 if channel
::GitInfo
::new(false, submodule
).is_managed_git_subrepository() {
655 self.update_submodule(submodule
);
660 /// Executes the entire build, as configured by the flags and configuration.
661 pub fn build(&mut self) {
666 self.maybe_update_submodules();
668 if let Subcommand
::Format { check, paths }
= &self.config
.cmd
{
669 return format
::format(&builder
::Builder
::new(&self), *check
, &paths
);
672 if let Subcommand
::Clean { all }
= self.config
.cmd
{
673 return clean
::clean(self, all
);
676 if let Subcommand
::Setup { profile }
= &self.config
.cmd
{
677 return setup
::setup(&self.config
, *profile
);
680 // Download rustfmt early so that it can be used in rust-analyzer configs.
681 let _
= &builder
::Builder
::new(&self).initial_rustfmt();
684 let builder
= builder
::Builder
::new(&self);
685 if let Some(path
) = builder
.paths
.get(0) {
686 if path
== Path
::new("nonexistent/path/to/trigger/cargo/metadata") {
692 if !self.config
.dry_run
{
694 self.config
.dry_run
= true;
695 let builder
= builder
::Builder
::new(&self);
696 builder
.execute_cli();
698 self.config
.dry_run
= false;
699 let builder
= builder
::Builder
::new(&self);
700 builder
.execute_cli();
702 let builder
= builder
::Builder
::new(&self);
703 builder
.execute_cli();
706 // Check for postponed failures from `test --no-fail-fast`.
707 let failures
= self.delayed_failures
.borrow();
708 if failures
.len() > 0 {
709 eprintln
!("\n{} command(s) did not execute successfully:\n", failures
.len());
710 for failure
in failures
.iter() {
711 eprintln
!(" - {}\n", failure
);
716 #[cfg(feature = "build-metrics")]
717 self.metrics
.persist(self);
720 /// Clear out `dir` if `input` is newer.
722 /// After this executes, it will also ensure that `dir` exists.
723 fn clear_if_dirty(&self, dir
: &Path
, input
: &Path
) -> bool
{
724 let stamp
= dir
.join(".stamp");
725 let mut cleared
= false;
726 if mtime(&stamp
) < mtime(input
) {
727 self.verbose(&format
!("Dirty - {}", dir
.display()));
728 let _
= fs
::remove_dir_all(dir
);
730 } else if stamp
.exists() {
733 t
!(fs
::create_dir_all(dir
));
734 t
!(File
::create(stamp
));
738 /// Gets the space-separated set of activated features for the standard
740 fn std_features(&self, target
: TargetSelection
) -> String
{
741 let mut features
= "panic-unwind".to_string();
743 match self.config
.llvm_libunwind(target
) {
744 LlvmLibunwind
::InTree
=> features
.push_str(" llvm-libunwind"),
745 LlvmLibunwind
::System
=> features
.push_str(" system-llvm-libunwind"),
746 LlvmLibunwind
::No
=> {}
748 if self.config
.backtrace
{
749 features
.push_str(" backtrace");
751 if self.config
.profiler_enabled(target
) {
752 features
.push_str(" profiler");
757 /// Gets the space-separated set of activated features for the compiler.
758 fn rustc_features(&self, kind
: Kind
) -> String
{
759 let mut features
= vec
![];
760 if self.config
.jemalloc
{
761 features
.push("jemalloc");
763 if self.config
.llvm_enabled() || kind
== Kind
::Check
{
764 features
.push("llvm");
766 // keep in sync with `bootstrap/compile.rs:rustc_cargo_env`
767 if self.config
.rustc_parallel
{
768 features
.push("rustc_use_parallel_compiler");
771 // If debug logging is on, then we want the default for tracing:
772 // https://github.com/tokio-rs/tracing/blob/3dd5c03d907afdf2c39444a29931833335171554/tracing/src/level_filters.rs#L26
773 // which is everything (including debug/trace/etc.)
774 // if its unset, if debug_assertions is on, then debug_logging will also be on
775 // as well as tracing *ignoring* this feature when debug_assertions is on
776 if !self.config
.rust_debug_logging
{
777 features
.push("max_level_info");
783 /// Component directory that Cargo will produce output into (e.g.
785 fn cargo_dir(&self) -> &'
static str {
786 if self.config
.rust_optimize { "release" }
else { "debug" }
789 fn tools_dir(&self, compiler
: Compiler
) -> PathBuf
{
792 .join(&*compiler
.host
.triple
)
793 .join(format
!("stage{}-tools-bin", compiler
.stage
));
794 t
!(fs
::create_dir_all(&out
));
798 /// Returns the root directory for all output generated in a particular
799 /// stage when running with a particular host compiler.
801 /// The mode indicates what the root directory is for.
802 fn stage_out(&self, compiler
: Compiler
, mode
: Mode
) -> PathBuf
{
803 let suffix
= match mode
{
805 Mode
::Rustc
=> "-rustc",
806 Mode
::Codegen
=> "-codegen",
807 Mode
::ToolBootstrap
=> "-bootstrap-tools",
808 Mode
::ToolStd
| Mode
::ToolRustc
=> "-tools",
810 self.out
.join(&*compiler
.host
.triple
).join(format
!("stage{}{}", compiler
.stage
, suffix
))
813 /// Returns the root output directory for all Cargo output in a given stage,
814 /// running a particular compiler, whether or not we're building the
815 /// standard library, and targeting the specified architecture.
816 fn cargo_out(&self, compiler
: Compiler
, mode
: Mode
, target
: TargetSelection
) -> PathBuf
{
817 self.stage_out(compiler
, mode
).join(&*target
.triple
).join(self.cargo_dir())
820 /// Root output directory for LLVM compiled for `target`
822 /// Note that if LLVM is configured externally then the directory returned
823 /// will likely be empty.
824 fn llvm_out(&self, target
: TargetSelection
) -> PathBuf
{
825 self.out
.join(&*target
.triple
).join("llvm")
828 fn lld_out(&self, target
: TargetSelection
) -> PathBuf
{
829 self.out
.join(&*target
.triple
).join("lld")
832 /// Output directory for all documentation for a target
833 fn doc_out(&self, target
: TargetSelection
) -> PathBuf
{
834 self.out
.join(&*target
.triple
).join("doc")
837 /// Output directory for all JSON-formatted documentation for a target
838 fn json_doc_out(&self, target
: TargetSelection
) -> PathBuf
{
839 self.out
.join(&*target
.triple
).join("json-doc")
842 fn test_out(&self, target
: TargetSelection
) -> PathBuf
{
843 self.out
.join(&*target
.triple
).join("test")
846 /// Output directory for all documentation for a target
847 fn compiler_doc_out(&self, target
: TargetSelection
) -> PathBuf
{
848 self.out
.join(&*target
.triple
).join("compiler-doc")
851 /// Output directory for some generated md crate documentation for a target (temporary)
852 fn md_doc_out(&self, target
: TargetSelection
) -> Interned
<PathBuf
> {
853 INTERNER
.intern_path(self.out
.join(&*target
.triple
).join("md-doc"))
856 /// Returns `true` if no custom `llvm-config` is set for the specified target.
858 /// If no custom `llvm-config` was specified then Rust's llvm will be used.
859 fn is_rust_llvm(&self, target
: TargetSelection
) -> bool
{
860 match self.config
.target_config
.get(&target
) {
861 Some(Target { llvm_has_rust_patches: Some(patched), .. }
) => *patched
,
862 Some(Target { llvm_config, .. }
) => {
863 // If the user set llvm-config we assume Rust is not patched,
864 // but first check to see if it was configured by llvm-from-ci.
865 (self.config
.llvm_from_ci
&& target
== self.config
.build
) || llvm_config
.is_none()
871 /// Returns the path to `FileCheck` binary for the specified target
872 fn llvm_filecheck(&self, target
: TargetSelection
) -> PathBuf
{
873 let target_config
= self.config
.target_config
.get(&target
);
874 if let Some(s
) = target_config
.and_then(|c
| c
.llvm_filecheck
.as_ref()) {
876 } else if let Some(s
) = target_config
.and_then(|c
| c
.llvm_config
.as_ref()) {
877 let llvm_bindir
= output(Command
::new(s
).arg("--bindir"));
878 let filecheck
= Path
::new(llvm_bindir
.trim()).join(exe("FileCheck", target
));
879 if filecheck
.exists() {
882 // On Fedora the system LLVM installs FileCheck in the
883 // llvm subdirectory of the libdir.
884 let llvm_libdir
= output(Command
::new(s
).arg("--libdir"));
886 Path
::new(llvm_libdir
.trim()).join("llvm").join(exe("FileCheck", target
));
887 if lib_filecheck
.exists() {
890 // Return the most normal file name, even though
891 // it doesn't exist, so that any error message
897 let base
= self.llvm_out(target
).join("build");
898 let base
= if !self.ninja() && target
.contains("msvc") {
899 if self.config
.llvm_optimize
{
900 if self.config
.llvm_release_debuginfo
{
901 base
.join("RelWithDebInfo")
911 base
.join("bin").join(exe("FileCheck", target
))
915 /// Directory for libraries built from C/C++ code and shared between stages.
916 fn native_dir(&self, target
: TargetSelection
) -> PathBuf
{
917 self.out
.join(&*target
.triple
).join("native")
920 /// Root output directory for rust_test_helpers library compiled for
922 fn test_helpers_out(&self, target
: TargetSelection
) -> PathBuf
{
923 self.native_dir(target
).join("rust-test-helpers")
926 /// Adds the `RUST_TEST_THREADS` env var if necessary
927 fn add_rust_test_threads(&self, cmd
: &mut Command
) {
928 if env
::var_os("RUST_TEST_THREADS").is_none() {
929 cmd
.env("RUST_TEST_THREADS", self.jobs().to_string());
933 /// Returns the libdir of the snapshot compiler.
934 fn rustc_snapshot_libdir(&self) -> PathBuf
{
935 self.rustc_snapshot_sysroot().join(libdir(self.config
.build
))
938 /// Returns the sysroot of the snapshot compiler.
939 fn rustc_snapshot_sysroot(&self) -> &Path
{
940 static SYSROOT_CACHE
: OnceCell
<PathBuf
> = once_cell
::sync
::OnceCell
::new();
941 SYSROOT_CACHE
.get_or_init(|| {
942 let mut rustc
= Command
::new(&self.initial_rustc
);
943 rustc
.args(&["--print", "sysroot"]);
944 output(&mut rustc
).trim().into()
948 /// Runs a command, printing out nice contextual information if it fails.
949 fn run(&self, cmd
: &mut Command
) {
950 if self.config
.dry_run
{
953 self.verbose(&format
!("running: {:?}", cmd
));
954 run(cmd
, self.is_verbose())
957 /// Runs a command, printing out nice contextual information if it fails.
958 fn run_quiet(&self, cmd
: &mut Command
) {
959 if self.config
.dry_run
{
962 self.verbose(&format
!("running: {:?}", cmd
));
966 /// Runs a command, printing out nice contextual information if it fails.
967 /// Exits if the command failed to execute at all, otherwise returns its
968 /// `status.success()`.
969 fn try_run(&self, cmd
: &mut Command
) -> bool
{
970 if self.config
.dry_run
{
973 self.verbose(&format
!("running: {:?}", cmd
));
974 try_run(cmd
, self.is_verbose())
977 /// Runs a command, printing out nice contextual information if it fails.
978 /// Exits if the command failed to execute at all, otherwise returns its
979 /// `status.success()`.
980 fn try_run_quiet(&self, cmd
: &mut Command
) -> bool
{
981 if self.config
.dry_run
{
984 self.verbose(&format
!("running: {:?}", cmd
));
985 try_run_suppressed(cmd
)
988 /// Runs a command, printing out nice contextual information if it fails.
989 /// Returns false if do not execute at all, otherwise returns its
990 /// `status.success()`.
991 fn check_run(&self, cmd
: &mut Command
) -> bool
{
992 if self.config
.dry_run
{
995 self.verbose(&format
!("running: {:?}", cmd
));
996 check_run(cmd
, self.is_verbose())
999 pub fn is_verbose(&self) -> bool
{
1003 /// Prints a message if this build is configured in verbose mode.
1004 fn verbose(&self, msg
: &str) {
1005 if self.is_verbose() {
1006 println
!("{}", msg
);
1010 pub fn is_verbose_than(&self, level
: usize) -> bool
{
1011 self.verbosity
> level
1014 /// Prints a message if this build is configured in more verbose mode than `level`.
1015 fn verbose_than(&self, level
: usize, msg
: &str) {
1016 if self.is_verbose_than(level
) {
1017 println
!("{}", msg
);
1021 fn info(&self, msg
: &str) {
1022 if self.config
.dry_run
{
1025 println
!("{}", msg
);
1028 /// Returns the number of parallel jobs that have been configured for this
1030 fn jobs(&self) -> u32 {
1031 self.config
.jobs
.unwrap_or_else(|| {
1032 std
::thread
::available_parallelism().map_or(1, std
::num
::NonZeroUsize
::get
) as u32
1036 fn debuginfo_map_to(&self, which
: GitRepo
) -> Option
<String
> {
1037 if !self.config
.rust_remap_debuginfo
{
1043 let sha
= self.rust_sha().unwrap_or(&self.version
);
1044 Some(format
!("/rustc/{}", sha
))
1046 GitRepo
::Llvm
=> Some(String
::from("/rustc/llvm")),
1050 /// Returns the path to the C compiler for the target specified.
1051 fn cc(&self, target
: TargetSelection
) -> &Path
{
1052 self.cc
[&target
].path()
1055 /// Returns a list of flags to pass to the C compiler for the target
1057 fn cflags(&self, target
: TargetSelection
, which
: GitRepo
, c
: CLang
) -> Vec
<String
> {
1058 let base
= match c
{
1059 CLang
::C
=> &self.cc
[&target
],
1060 CLang
::Cxx
=> &self.cxx
[&target
],
1063 // Filter out -O and /O (the optimization flags) that we picked up from
1064 // cc-rs because the build scripts will determine that for themselves.
1068 .map(|s
| s
.to_string_lossy().into_owned())
1069 .filter(|s
| !s
.starts_with("-O") && !s
.starts_with("/O"))
1070 .collect
::<Vec
<String
>>();
1072 // If we're compiling on macOS then we add a few unconditional flags
1073 // indicating that we want libc++ (more filled out than libstdc++) and
1074 // we want to compile for 10.7. This way we can ensure that
1075 // LLVM/etc are all properly compiled.
1076 if target
.contains("apple-darwin") {
1077 base
.push("-stdlib=libc++".into());
1080 // Work around an apparently bad MinGW / GCC optimization,
1081 // See: https://lists.llvm.org/pipermail/cfe-dev/2016-December/051980.html
1082 // See: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=78936
1083 if &*target
.triple
== "i686-pc-windows-gnu" {
1084 base
.push("-fno-omit-frame-pointer".into());
1087 if let Some(map_to
) = self.debuginfo_map_to(which
) {
1088 let map
= format
!("{}={}", self.src
.display(), map_to
);
1089 let cc
= self.cc(target
);
1090 if cc
.ends_with("clang") || cc
.ends_with("gcc") {
1091 base
.push(format
!("-fdebug-prefix-map={}", map
));
1092 } else if cc
.ends_with("clang-cl.exe") {
1093 base
.push("-Xclang".into());
1094 base
.push(format
!("-fdebug-prefix-map={}", map
));
1100 /// Returns the path to the `ar` archive utility for the target specified.
1101 fn ar(&self, target
: TargetSelection
) -> Option
<&Path
> {
1102 self.ar
.get(&target
).map(|p
| &**p
)
1105 /// Returns the path to the `ranlib` utility for the target specified.
1106 fn ranlib(&self, target
: TargetSelection
) -> Option
<&Path
> {
1107 self.ranlib
.get(&target
).map(|p
| &**p
)
1110 /// Returns the path to the C++ compiler for the target specified.
1111 fn cxx(&self, target
: TargetSelection
) -> Result
<&Path
, String
> {
1112 match self.cxx
.get(&target
) {
1113 Some(p
) => Ok(p
.path()),
1115 Err(format
!("target `{}` is not configured as a host, only as a target", target
))
1120 /// Returns the path to the linker for the given target if it needs to be overridden.
1121 fn linker(&self, target
: TargetSelection
) -> Option
<&Path
> {
1122 if let Some(linker
) = self.config
.target_config
.get(&target
).and_then(|c
| c
.linker
.as_ref())
1125 } else if target
.contains("vxworks") {
1126 // need to use CXX compiler as linker to resolve the exception functions
1127 // that are only existed in CXX libraries
1128 Some(self.cxx
[&target
].path())
1129 } else if target
!= self.config
.build
1130 && util
::use_host_linker(target
)
1131 && !target
.contains("msvc")
1133 Some(self.cc(target
))
1134 } else if self.config
.use_lld
&& !self.is_fuse_ld_lld(target
) && self.build
== target
{
1135 Some(&self.initial_lld
)
1141 // LLD is used through `-fuse-ld=lld` rather than directly.
1142 // Only MSVC targets use LLD directly at the moment.
1143 fn is_fuse_ld_lld(&self, target
: TargetSelection
) -> bool
{
1144 self.config
.use_lld
&& !target
.contains("msvc")
1147 fn lld_flags(&self, target
: TargetSelection
) -> impl Iterator
<Item
= String
> {
1148 let mut options
= [None
, None
];
1150 if self.config
.use_lld
{
1151 if self.is_fuse_ld_lld(target
) {
1152 options
[0] = Some("-Clink-arg=-fuse-ld=lld".to_string());
1155 let threads
= if target
.contains("windows") { "/threads:1" }
else { "--threads=1" }
;
1156 options
[1] = Some(format
!("-Clink-arg=-Wl,{}", threads
));
1159 IntoIterator
::into_iter(options
).flatten()
1162 /// Returns if this target should statically link the C runtime, if specified
1163 fn crt_static(&self, target
: TargetSelection
) -> Option
<bool
> {
1164 if target
.contains("pc-windows-msvc") {
1167 self.config
.target_config
.get(&target
).and_then(|t
| t
.crt_static
)
1171 /// Returns the "musl root" for this `target`, if defined
1172 fn musl_root(&self, target
: TargetSelection
) -> Option
<&Path
> {
1176 .and_then(|t
| t
.musl_root
.as_ref())
1177 .or_else(|| self.config
.musl_root
.as_ref())
1181 /// Returns the "musl libdir" for this `target`.
1182 fn musl_libdir(&self, target
: TargetSelection
) -> Option
<PathBuf
> {
1183 let t
= self.config
.target_config
.get(&target
)?
;
1184 if let libdir @
Some(_
) = &t
.musl_libdir
{
1185 return libdir
.clone();
1187 self.musl_root(target
).map(|root
| root
.join("lib"))
1190 /// Returns the sysroot for the wasi target, if defined
1191 fn wasi_root(&self, target
: TargetSelection
) -> Option
<&Path
> {
1192 self.config
.target_config
.get(&target
).and_then(|t
| t
.wasi_root
.as_ref()).map(|p
| &**p
)
1195 /// Returns `true` if this is a no-std `target`, if defined
1196 fn no_std(&self, target
: TargetSelection
) -> Option
<bool
> {
1197 self.config
.target_config
.get(&target
).map(|t
| t
.no_std
)
1200 /// Returns `true` if the target will be tested using the `remote-test-client`
1201 /// and `remote-test-server` binaries.
1202 fn remote_tested(&self, target
: TargetSelection
) -> bool
{
1203 self.qemu_rootfs(target
).is_some()
1204 || target
.contains("android")
1205 || env
::var_os("TEST_DEVICE_ADDR").is_some()
1208 /// Returns the root of the "rootfs" image that this target will be using,
1209 /// if one was configured.
1211 /// If `Some` is returned then that means that tests for this target are
1212 /// emulated with QEMU and binaries will need to be shipped to the emulator.
1213 fn qemu_rootfs(&self, target
: TargetSelection
) -> Option
<&Path
> {
1214 self.config
.target_config
.get(&target
).and_then(|t
| t
.qemu_rootfs
.as_ref()).map(|p
| &**p
)
1217 /// Path to the python interpreter to use
1218 fn python(&self) -> &Path
{
1219 if self.config
.build
.ends_with("apple-darwin") {
1220 // Force /usr/bin/python3 on macOS for LLDB tests because we're loading the
1221 // LLDB plugin's compiled module which only works with the system python
1222 // (namely not Homebrew-installed python)
1223 Path
::new("/usr/bin/python3")
1228 .expect("python is required for running LLDB or rustdoc tests")
1232 /// Temporary directory that extended error information is emitted to.
1233 fn extended_error_dir(&self) -> PathBuf
{
1234 self.out
.join("tmp/extended-error-metadata")
1237 /// Tests whether the `compiler` compiling for `target` should be forced to
1238 /// use a stage1 compiler instead.
1240 /// Currently, by default, the build system does not perform a "full
1241 /// bootstrap" by default where we compile the compiler three times.
1242 /// Instead, we compile the compiler two times. The final stage (stage2)
1243 /// just copies the libraries from the previous stage, which is what this
1246 /// Here we return `true` if:
1248 /// * The build isn't performing a full bootstrap
1249 /// * The `compiler` is in the final stage, 2
1250 /// * We're not cross-compiling, so the artifacts are already available in
1253 /// When all of these conditions are met the build will lift artifacts from
1254 /// the previous stage forward.
1255 fn force_use_stage1(&self, compiler
: Compiler
, target
: TargetSelection
) -> bool
{
1256 !self.config
.full_bootstrap
1257 && compiler
.stage
>= 2
1258 && (self.hosts
.iter().any(|h
| *h
== target
) || target
== self.build
)
1261 /// Given `num` in the form "a.b.c" return a "release string" which
1262 /// describes the release version number.
1264 /// For example on nightly this returns "a.b.c-nightly", on beta it returns
1265 /// "a.b.c-beta.1" and on stable it just returns "a.b.c".
1266 fn release(&self, num
: &str) -> String
{
1267 match &self.config
.channel
[..] {
1268 "stable" => num
.to_string(),
1270 if self.rust_info
.is_managed_git_subrepository() && !self.config
.ignore_git
{
1271 format
!("{}-beta.{}", num
, self.beta_prerelease_version())
1273 format
!("{}-beta", num
)
1276 "nightly" => format
!("{}-nightly", num
),
1277 _
=> format
!("{}-dev", num
),
1281 fn beta_prerelease_version(&self) -> u32 {
1282 if let Some(s
) = self.prerelease_version
.get() {
1286 // Figure out how many merge commits happened since we branched off master.
1287 // That's our beta number!
1288 // (Note that we use a `..` range, not the `...` symmetric difference.)
1290 output(self.config
.git().arg("rev-list").arg("--count").arg("--merges").arg(format
!(
1291 "refs/remotes/origin/{}..HEAD",
1292 self.config
.stage0_metadata
.config
.nightly_branch
1294 let n
= count
.trim().parse().unwrap();
1295 self.prerelease_version
.set(Some(n
));
1299 /// Returns the value of `release` above for Rust itself.
1300 fn rust_release(&self) -> String
{
1301 self.release(&self.version
)
1304 /// Returns the "package version" for a component given the `num` release
1307 /// The package version is typically what shows up in the names of tarballs.
1308 /// For channels like beta/nightly it's just the channel name, otherwise
1309 /// it's the `num` provided.
1310 fn package_vers(&self, num
: &str) -> String
{
1311 match &self.config
.channel
[..] {
1312 "stable" => num
.to_string(),
1313 "beta" => "beta".to_string(),
1314 "nightly" => "nightly".to_string(),
1315 _
=> format
!("{}-dev", num
),
1319 /// Returns the value of `package_vers` above for Rust itself.
1320 fn rust_package_vers(&self) -> String
{
1321 self.package_vers(&self.version
)
1324 /// Returns the `version` string associated with this compiler for Rust
1327 /// Note that this is a descriptive string which includes the commit date,
1328 /// sha, version, etc.
1329 fn rust_version(&self) -> String
{
1330 let mut version
= self.rust_info
.version(self, &self.version
);
1331 if let Some(ref s
) = self.config
.description
{
1332 version
.push_str(" (");
1333 version
.push_str(s
);
1339 /// Returns the full commit hash.
1340 fn rust_sha(&self) -> Option
<&str> {
1341 self.rust_info
.sha()
1344 /// Returns the `a.b.c` version that the given package is at.
1345 fn release_num(&self, package
: &str) -> String
{
1346 let toml_file_name
= self.src
.join(&format
!("src/tools/{}/Cargo.toml", package
));
1347 let toml
= t
!(fs
::read_to_string(&toml_file_name
));
1348 for line
in toml
.lines() {
1349 if let Some(stripped
) =
1350 line
.strip_prefix("version = \"").and_then(|s
| s
.strip_suffix("\""))
1352 return stripped
.to_owned();
1356 panic
!("failed to find version in {}'s Cargo.toml", package
)
1359 /// Returns `true` if unstable features should be enabled for the compiler
1361 fn unstable_features(&self) -> bool
{
1362 match &self.config
.channel
[..] {
1363 "stable" | "beta" => false,
1364 "nightly" | _
=> true,
1368 /// Returns a Vec of all the dependencies of the given root crate,
1369 /// including transitive dependencies and the root itself. Only includes
1370 /// "local" crates (those in the local source tree, not from a registry).
1371 fn in_tree_crates(&self, root
: &str, target
: Option
<TargetSelection
>) -> Vec
<&Crate
> {
1372 let mut ret
= Vec
::new();
1373 let mut list
= vec
![INTERNER
.intern_str(root
)];
1374 let mut visited
= HashSet
::new();
1375 while let Some(krate
) = list
.pop() {
1376 let krate
= &self.crates
[&krate
];
1378 for dep
in &krate
.deps
{
1379 if !self.crates
.contains_key(dep
) {
1380 // Ignore non-workspace members.
1383 // Don't include optional deps if their features are not
1384 // enabled. Ideally this would be computed from `cargo
1385 // metadata --features …`, but that is somewhat slow. In
1386 // the future, we may want to consider just filtering all
1387 // build and dev dependencies in metadata::build.
1388 if visited
.insert(dep
)
1389 && (dep
!= "profiler_builtins"
1391 .map(|t
| self.config
.profiler_enabled(t
))
1392 .unwrap_or_else(|| self.config
.any_profiler_enabled()))
1393 && (dep
!= "rustc_codegen_llvm" || self.config
.llvm_enabled())
1402 fn read_stamp_file(&self, stamp
: &Path
) -> Vec
<(PathBuf
, DependencyType
)> {
1403 if self.config
.dry_run
{
1407 let mut paths
= Vec
::new();
1408 let contents
= t
!(fs
::read(stamp
), &stamp
);
1409 // This is the method we use for extracting paths from the stamp file passed to us. See
1410 // run_cargo for more information (in compile.rs).
1411 for part
in contents
.split(|b
| *b
== 0) {
1412 if part
.is_empty() {
1415 let dependency_type
= match part
[0] as char {
1416 'h'
=> DependencyType
::Host
,
1417 's'
=> DependencyType
::TargetSelfContained
,
1418 't'
=> DependencyType
::Target
,
1419 _
=> unreachable
!(),
1421 let path
= PathBuf
::from(t
!(str::from_utf8(&part
[1..])));
1422 paths
.push((path
, dependency_type
));
1427 /// Create a temporary directory in `out` and return its path.
1429 /// NOTE: this temporary directory is shared between all steps;
1430 /// if you need an empty directory, create a new subdirectory inside it.
1431 fn tempdir(&self) -> PathBuf
{
1432 let tmp
= self.out
.join("tmp");
1433 t
!(fs
::create_dir_all(&tmp
));
1437 /// Copies a file from `src` to `dst`
1438 pub fn copy(&self, src
: &Path
, dst
: &Path
) {
1439 self.copy_internal(src
, dst
, false);
1442 fn copy_internal(&self, src
: &Path
, dst
: &Path
, dereference_symlinks
: bool
) {
1443 if self.config
.dry_run
{
1446 self.verbose_than(1, &format
!("Copy {:?} to {:?}", src
, dst
));
1450 let _
= fs
::remove_file(&dst
);
1451 let metadata
= t
!(src
.symlink_metadata());
1452 let mut src
= src
.to_path_buf();
1453 if metadata
.file_type().is_symlink() {
1454 if dereference_symlinks
{
1455 src
= t
!(fs
::canonicalize(src
));
1457 let link
= t
!(fs
::read_link(src
));
1458 t
!(self.symlink_file(link
, dst
));
1462 if let Ok(()) = fs
::hard_link(&src
, dst
) {
1463 // Attempt to "easy copy" by creating a hard link
1464 // (symlinks don't work on windows), but if that fails
1465 // just fall back to a slow `copy` operation.
1467 if let Err(e
) = fs
::copy(&src
, dst
) {
1468 panic
!("failed to copy `{}` to `{}`: {}", src
.display(), dst
.display(), e
)
1470 t
!(fs
::set_permissions(dst
, metadata
.permissions()));
1471 let atime
= FileTime
::from_last_access_time(&metadata
);
1472 let mtime
= FileTime
::from_last_modification_time(&metadata
);
1473 t
!(filetime
::set_file_times(dst
, atime
, mtime
));
1477 /// Copies the `src` directory recursively to `dst`. Both are assumed to exist
1478 /// when this function is called.
1479 pub fn cp_r(&self, src
: &Path
, dst
: &Path
) {
1480 if self.config
.dry_run
{
1483 for f
in self.read_dir(src
) {
1484 let path
= f
.path();
1485 let name
= path
.file_name().unwrap();
1486 let dst
= dst
.join(name
);
1487 if t
!(f
.file_type()).is_dir() {
1488 t
!(fs
::create_dir_all(&dst
));
1489 self.cp_r(&path
, &dst
);
1491 let _
= fs
::remove_file(&dst
);
1492 self.copy(&path
, &dst
);
1497 /// Copies the `src` directory recursively to `dst`. Both are assumed to exist
1498 /// when this function is called. Unwanted files or directories can be skipped
1499 /// by returning `false` from the filter function.
1500 pub fn cp_filtered(&self, src
: &Path
, dst
: &Path
, filter
: &dyn Fn(&Path
) -> bool
) {
1501 // Immediately recurse with an empty relative path
1502 self.recurse_(src
, dst
, Path
::new(""), filter
)
1505 // Inner function does the actual work
1506 fn recurse_(&self, src
: &Path
, dst
: &Path
, relative
: &Path
, filter
: &dyn Fn(&Path
) -> bool
) {
1507 for f
in self.read_dir(src
) {
1508 let path
= f
.path();
1509 let name
= path
.file_name().unwrap();
1510 let dst
= dst
.join(name
);
1511 let relative
= relative
.join(name
);
1512 // Only copy file or directory if the filter function returns true
1513 if filter(&relative
) {
1514 if t
!(f
.file_type()).is_dir() {
1515 let _
= fs
::remove_dir_all(&dst
);
1516 self.create_dir(&dst
);
1517 self.recurse_(&path
, &dst
, &relative
, filter
);
1519 let _
= fs
::remove_file(&dst
);
1520 self.copy(&path
, &dst
);
1526 fn copy_to_folder(&self, src
: &Path
, dest_folder
: &Path
) {
1527 let file_name
= src
.file_name().unwrap();
1528 let dest
= dest_folder
.join(file_name
);
1529 self.copy(src
, &dest
);
1532 fn install(&self, src
: &Path
, dstdir
: &Path
, perms
: u32) {
1533 if self.config
.dry_run
{
1536 let dst
= dstdir
.join(src
.file_name().unwrap());
1537 self.verbose_than(1, &format
!("Install {:?} to {:?}", src
, dst
));
1538 t
!(fs
::create_dir_all(dstdir
));
1540 panic
!("Error: File \"{}\" not found!", src
.display());
1542 self.copy_internal(src
, &dst
, true);
1546 fn create(&self, path
: &Path
, s
: &str) {
1547 if self.config
.dry_run
{
1550 t
!(fs
::write(path
, s
));
1553 fn read(&self, path
: &Path
) -> String
{
1554 if self.config
.dry_run
{
1555 return String
::new();
1557 t
!(fs
::read_to_string(path
))
1560 fn create_dir(&self, dir
: &Path
) {
1561 if self.config
.dry_run
{
1564 t
!(fs
::create_dir_all(dir
))
1567 fn remove_dir(&self, dir
: &Path
) {
1568 if self.config
.dry_run
{
1571 t
!(fs
::remove_dir_all(dir
))
1574 fn read_dir(&self, dir
: &Path
) -> impl Iterator
<Item
= fs
::DirEntry
> {
1575 let iter
= match fs
::read_dir(dir
) {
1577 Err(_
) if self.config
.dry_run
=> return vec
![].into_iter(),
1578 Err(err
) => panic
!("could not read dir {:?}: {:?}", dir
, err
),
1580 iter
.map(|e
| t
!(e
)).collect
::<Vec
<_
>>().into_iter()
1583 fn symlink_file
<P
: AsRef
<Path
>, Q
: AsRef
<Path
>>(&self, src
: P
, link
: Q
) -> io
::Result
<()> {
1585 use std
::os
::unix
::fs
::symlink
as symlink_file
;
1587 use std
::os
::windows
::fs
::symlink_file
;
1588 if !self.config
.dry_run { symlink_file(src.as_ref(), link.as_ref()) }
else { Ok(()) }
1591 fn remove(&self, f
: &Path
) {
1592 if self.config
.dry_run
{
1595 fs
::remove_file(f
).unwrap_or_else(|_
| panic
!("failed to remove {:?}", f
));
1598 /// Returns if config.ninja is enabled, and checks for ninja existence,
1599 /// exiting with a nicer error message if not.
1600 fn ninja(&self) -> bool
{
1601 let mut cmd_finder
= crate::sanity
::Finder
::new();
1603 if self.config
.ninja_in_file
{
1604 // Some Linux distros rename `ninja` to `ninja-build`.
1605 // CMake can work with either binary name.
1606 if cmd_finder
.maybe_have("ninja-build").is_none()
1607 && cmd_finder
.maybe_have("ninja").is_none()
1611 Couldn't find required command: ninja (or ninja-build)
1613 You should install ninja as described at
1614 <https://github.com/ninja-build/ninja/wiki/Pre-built-Ninja-packages>,
1615 or set `ninja = false` in the `[llvm]` section of `config.toml`.
1616 Alternatively, set `download-ci-llvm = true` in that `[llvm]` section
1617 to download LLVM rather than building it.
1624 // If ninja isn't enabled but we're building for MSVC then we try
1625 // doubly hard to enable it. It was realized in #43767 that the msbuild
1626 // CMake generator for MSVC doesn't respect configuration options like
1627 // disabling LLVM assertions, which can often be quite important!
1629 // In these cases we automatically enable Ninja if we find it in the
1631 if !self.config
.ninja_in_file
&& self.config
.build
.contains("msvc") {
1632 if cmd_finder
.maybe_have("ninja").is_some() {
1637 self.config
.ninja_in_file
1642 fn chmod(path
: &Path
, perms
: u32) {
1643 use std
::os
::unix
::fs
::*;
1644 t
!(fs
::set_permissions(path
, fs
::Permissions
::from_mode(perms
)));
/// No-op on Windows, which has no Unix-style permission bits.
#[cfg(windows)]
fn chmod(_path: &Path, _perms: u32) {}
/// Terminates the process with `code`.
///
/// Under `cfg(test)` a non-zero code panics instead of exiting, so test
/// harnesses can observe the failure rather than being killed.
fn detail_exit(code: i32) -> ! {
    // In tests, surface a failing exit code as a panic.
    if cfg!(test) && code != 0 {
        panic!("status code: {}", code);
    }
    // Otherwise, exit with the provided status code.
    std::process::exit(code);
}
1662 pub fn with_stage(mut self, stage
: u32) -> Compiler
{
1667 /// Returns `true` if this is a snapshot compiler for `build`'s configuration
1668 pub fn is_snapshot(&self, build
: &Build
) -> bool
{
1669 self.stage
== 0 && self.host
== build
.build
1672 /// Returns if this compiler should be treated as a final stage one in the
1673 /// current build session.
1674 /// This takes into account whether we're performing a full bootstrap or
1675 /// not; don't directly compare the stage with `2`!
1676 pub fn is_final_stage(&self, build
: &Build
) -> bool
{
1677 let final_stage
= if build
.config
.full_bootstrap { 2 }
else { 1 }
;
1678 self.stage
>= final_stage
1682 fn envify(s
: &str) -> String
{
1688 .flat_map(|c
| c
.to_uppercase())