3 use std
::path
::{Path, PathBuf}
;
4 use std
::process
::Command
;
6 use crate::build_sysroot
::STDLIB_SRC
;
7 use crate::path
::{Dirs, RelPath}
;
8 use crate::rustc_info
::get_default_sysroot
;
10 copy_dir_recursively
, git_command
, remove_dir_if_exists
, retry_spawn_and_wait
, spawn_and_wait
,
13 pub(crate) fn prepare(dirs
: &Dirs
) {
14 RelPath
::DOWNLOAD
.ensure_exists(dirs
);
15 crate::tests
::RAND_REPO
.fetch(dirs
);
16 crate::tests
::REGEX_REPO
.fetch(dirs
);
17 crate::tests
::PORTABLE_SIMD_REPO
.fetch(dirs
);
20 pub(crate) fn prepare_stdlib(dirs
: &Dirs
, rustc
: &Path
) {
21 let sysroot_src_orig
= get_default_sysroot(rustc
).join("lib/rustlib/src/rust");
22 assert
!(sysroot_src_orig
.exists());
24 apply_patches(dirs
, "stdlib", &sysroot_src_orig
, &STDLIB_SRC
.to_path(dirs
));
27 STDLIB_SRC
.to_path(dirs
).join("Cargo.toml"),
31 members = ["./library/sysroot"]
34 rustc-std-workspace-core = { path = "./library/rustc-std-workspace-core" }
35 rustc-std-workspace-alloc = { path = "./library/rustc-std-workspace-alloc" }
36 rustc-std-workspace-std = { path = "./library/rustc-std-workspace-std" }
38 # Mandatory for correctly compiling compiler-builtins
39 [profile.dev.package.compiler_builtins]
40 debug-assertions = false
41 overflow-checks = false
44 [profile.release.package.compiler_builtins]
45 debug-assertions = false
46 overflow-checks = false
52 let source_lockfile
= RelPath
::PATCHES
.to_path(dirs
).join("stdlib-lock.toml");
53 let target_lockfile
= STDLIB_SRC
.to_path(dirs
).join("Cargo.lock");
54 fs
::copy(source_lockfile
, target_lockfile
).unwrap();
/// A pinned external git repository together with the information needed to
/// verify and patch a downloaded copy of it.
pub(crate) struct GitRepo {
    url: GitRepoUrl,
    /// Commit to check out.
    rev: &'static str,
    /// Expected `hash_dir` hash (formatted as 16 hex digits) of the download.
    content_hash: &'static str,
    /// Base name used to look up `<patch_name>-lock.toml` and patch files.
    patch_name: &'static str,
}

/// Where a [`GitRepo`] is hosted; currently only GitHub is supported.
enum GitRepoUrl {
    Github { user: &'static str, repo: &'static str },
}
// Note: This uses a hasher which is not cryptographically secure. This is fine as the hash is meant
// to protect against accidental modification and outdated downloads, not against manipulation.
/// Hashes the raw byte contents of a single file with SipHash.
fn hash_file(file: &std::path::Path) -> u64 {
    let contents = std::fs::read(file).unwrap();
    // SipHasher is deprecated in std, but a stable, deterministic hash is
    // exactly what is wanted here.
    #[allow(deprecated)]
    let mut hasher = std::hash::SipHasher::new();
    std::hash::Hash::hash(&contents, &mut hasher);
    std::hash::Hasher::finish(&hasher)
}
78 fn hash_dir(dir
: &std
::path
::Path
) -> u64 {
79 let mut sub_hashes
= std
::collections
::BTreeMap
::new();
80 for entry
in std
::fs
::read_dir(dir
).unwrap() {
81 let entry
= entry
.unwrap();
82 if entry
.file_type().unwrap().is_dir() {
84 .insert(entry
.file_name().to_str().unwrap().to_owned(), hash_dir(&entry
.path()));
87 .insert(entry
.file_name().to_str().unwrap().to_owned(), hash_file(&entry
.path()));
91 let mut hasher
= std
::hash
::SipHasher
::new();
92 std
::hash
::Hash
::hash(&sub_hashes
, &mut hasher
);
93 std
::hash
::Hasher
::finish(&hasher
)
97 pub(crate) const fn github(
101 content_hash
: &'
static str,
102 patch_name
: &'
static str,
104 GitRepo { url: GitRepoUrl::Github { user, repo }
, rev
, content_hash
, patch_name
}
107 fn download_dir(&self, dirs
: &Dirs
) -> PathBuf
{
109 GitRepoUrl
::Github { user: _, repo }
=> RelPath
::DOWNLOAD
.join(repo
).to_path(dirs
),
113 pub(crate) const fn source_dir(&self) -> RelPath
{
115 GitRepoUrl
::Github { user: _, repo }
=> RelPath
::BUILD
.join(repo
),
119 pub(crate) fn fetch(&self, dirs
: &Dirs
) {
120 let download_dir
= self.download_dir(dirs
);
122 if download_dir
.exists() {
123 let actual_hash
= format
!("{:016x}", hash_dir(&download_dir
));
124 if actual_hash
== self.content_hash
{
125 eprintln
!("[FRESH] {}", download_dir
.display());
129 "Mismatched content hash for {download_dir}: {actual_hash} != {content_hash}. Downloading again.",
130 download_dir
= download_dir
.display(),
131 content_hash
= self.content_hash
,
137 GitRepoUrl
::Github { user, repo }
=> {
138 clone_repo_shallow_github(dirs
, &download_dir
, user
, repo
, self.rev
);
142 let source_lockfile
=
143 RelPath
::PATCHES
.to_path(dirs
).join(format
!("{}-lock.toml", self.patch_name
));
144 let target_lockfile
= download_dir
.join("Cargo.lock");
145 if source_lockfile
.exists() {
146 assert
!(!target_lockfile
.exists());
147 fs
::copy(source_lockfile
, target_lockfile
).unwrap();
149 assert
!(target_lockfile
.exists());
152 let actual_hash
= format
!("{:016x}", hash_dir(&download_dir
));
153 if actual_hash
!= self.content_hash
{
155 "Download of {download_dir} failed with mismatched content hash: {actual_hash} != {content_hash}",
156 download_dir
= download_dir
.display(),
157 content_hash
= self.content_hash
,
159 std
::process
::exit(1);
163 pub(crate) fn patch(&self, dirs
: &Dirs
) {
167 &self.download_dir(dirs
),
168 &self.source_dir().to_path(dirs
),
174 fn clone_repo(download_dir
: &Path
, repo
: &str, rev
: &str) {
175 eprintln
!("[CLONE] {}", repo
);
176 // Ignore exit code as the repo may already have been checked out
177 git_command(None
, "clone").arg(repo
).arg(download_dir
).spawn().unwrap().wait().unwrap();
179 let mut clean_cmd
= git_command(download_dir
, "checkout");
180 clean_cmd
.arg("--").arg(".");
181 spawn_and_wait(clean_cmd
);
183 let mut checkout_cmd
= git_command(download_dir
, "checkout");
184 checkout_cmd
.arg("-q").arg(rev
);
185 spawn_and_wait(checkout_cmd
);
187 std
::fs
::remove_dir_all(download_dir
.join(".git")).unwrap();
190 fn clone_repo_shallow_github(dirs
: &Dirs
, download_dir
: &Path
, user
: &str, repo
: &str, rev
: &str) {
192 // Older windows doesn't have tar or curl by default. Fall back to using git.
193 clone_repo(download_dir
, &format
!("https://github.com/{}/{}.git", user
, repo
), rev
);
197 let archive_url
= format
!("https://github.com/{}/{}/archive/{}.tar.gz", user
, repo
, rev
);
198 let archive_file
= RelPath
::DOWNLOAD
.to_path(dirs
).join(format
!("{}.tar.gz", rev
));
199 let archive_dir
= RelPath
::DOWNLOAD
.to_path(dirs
).join(format
!("{}-{}", repo
, rev
));
201 eprintln
!("[DOWNLOAD] {}/{} from {}", user
, repo
, archive_url
);
203 // Remove previous results if they exists
204 let _
= std
::fs
::remove_file(&archive_file
);
205 let _
= std
::fs
::remove_dir_all(&archive_dir
);
206 let _
= std
::fs
::remove_dir_all(&download_dir
);
208 // Download zip archive
209 let mut download_cmd
= Command
::new("curl");
217 .arg("--connect-timeout")
219 .arg("--continue-at")
225 retry_spawn_and_wait(5, download_cmd
);
227 // Unpack tar archive
228 let mut unpack_cmd
= Command
::new("tar");
229 unpack_cmd
.arg("xf").arg(&archive_file
).current_dir(RelPath
::DOWNLOAD
.to_path(dirs
));
230 spawn_and_wait(unpack_cmd
);
232 // Rename unpacked dir to the expected name
233 std
::fs
::rename(archive_dir
, &download_dir
).unwrap();
236 std
::fs
::remove_file(archive_file
).unwrap();
239 fn init_git_repo(repo_dir
: &Path
) {
240 let mut git_init_cmd
= git_command(repo_dir
, "init");
241 git_init_cmd
.arg("-q");
242 spawn_and_wait(git_init_cmd
);
244 let mut git_add_cmd
= git_command(repo_dir
, "add");
245 git_add_cmd
.arg(".");
246 spawn_and_wait(git_add_cmd
);
248 let mut git_commit_cmd
= git_command(repo_dir
, "commit");
249 git_commit_cmd
.arg("-m").arg("Initial commit").arg("-q");
250 spawn_and_wait(git_commit_cmd
);
253 fn get_patches(dirs
: &Dirs
, crate_name
: &str) -> Vec
<PathBuf
> {
254 let mut patches
: Vec
<_
> = fs
::read_dir(RelPath
::PATCHES
.to_path(dirs
))
256 .map(|entry
| entry
.unwrap().path())
257 .filter(|path
| path
.extension() == Some(OsStr
::new("patch")))
266 .starts_with(crate_name
)
273 pub(crate) fn apply_patches(dirs
: &Dirs
, crate_name
: &str, source_dir
: &Path
, target_dir
: &Path
) {
274 // FIXME avoid copy and patch if src, patches and target are unchanged
276 eprintln
!("[COPY] {crate_name} source");
278 remove_dir_if_exists(target_dir
);
279 fs
::create_dir_all(target_dir
).unwrap();
280 if crate_name
== "stdlib" {
281 fs
::create_dir(target_dir
.join("library")).unwrap();
282 copy_dir_recursively(&source_dir
.join("library"), &target_dir
.join("library"));
284 copy_dir_recursively(source_dir
, target_dir
);
287 init_git_repo(target_dir
);
289 if crate_name
== "<none>" {
293 for patch
in get_patches(dirs
, crate_name
) {
295 "[PATCH] {:?} <- {:?}",
296 target_dir
.file_name().unwrap(),
297 patch
.file_name().unwrap()
299 let mut apply_patch_cmd
= git_command(target_dir
, "am");
300 apply_patch_cmd
.arg(patch
).arg("-q");
301 spawn_and_wait(apply_patch_cmd
);