]> git.proxmox.com Git - rustc.git/blob - compiler/rustc_codegen_cranelift/build_system/prepare.rs
New upstream version 1.75.0+dfsg1
[rustc.git] / compiler / rustc_codegen_cranelift / build_system / prepare.rs
1 use std::ffi::OsStr;
2 use std::fs;
3 use std::path::{Path, PathBuf};
4 use std::process::Command;
5
6 use crate::build_sysroot::STDLIB_SRC;
7 use crate::path::{Dirs, RelPath};
8 use crate::rustc_info::get_default_sysroot;
9 use crate::utils::{
10 copy_dir_recursively, git_command, remove_dir_if_exists, retry_spawn_and_wait, spawn_and_wait,
11 };
12
13 pub(crate) fn prepare(dirs: &Dirs) {
14 RelPath::DOWNLOAD.ensure_exists(dirs);
15 crate::tests::RAND_REPO.fetch(dirs);
16 crate::tests::REGEX_REPO.fetch(dirs);
17 crate::tests::PORTABLE_SIMD_REPO.fetch(dirs);
18 }
19
20 pub(crate) fn prepare_stdlib(dirs: &Dirs, rustc: &Path) {
21 let sysroot_src_orig = get_default_sysroot(rustc).join("lib/rustlib/src/rust");
22 assert!(sysroot_src_orig.exists());
23
24 apply_patches(dirs, "stdlib", &sysroot_src_orig, &STDLIB_SRC.to_path(dirs));
25
26 std::fs::write(
27 STDLIB_SRC.to_path(dirs).join("Cargo.toml"),
28 r#"
29 [workspace]
30 resolver = "1"
31 members = ["./library/sysroot"]
32
33 [patch.crates-io]
34 rustc-std-workspace-core = { path = "./library/rustc-std-workspace-core" }
35 rustc-std-workspace-alloc = { path = "./library/rustc-std-workspace-alloc" }
36 rustc-std-workspace-std = { path = "./library/rustc-std-workspace-std" }
37
38 # Mandatory for correctly compiling compiler-builtins
39 [profile.dev.package.compiler_builtins]
40 debug-assertions = false
41 overflow-checks = false
42 codegen-units = 10000
43
44 [profile.release.package.compiler_builtins]
45 debug-assertions = false
46 overflow-checks = false
47 codegen-units = 10000
48 "#,
49 )
50 .unwrap();
51
52 let source_lockfile = RelPath::PATCHES.to_path(dirs).join("stdlib-lock.toml");
53 let target_lockfile = STDLIB_SRC.to_path(dirs).join("Cargo.lock");
54 fs::copy(source_lockfile, target_lockfile).unwrap();
55 }
56
/// An external git repository pinned to a fixed revision, verified against a
/// content hash of the downloaded sources.
pub(crate) struct GitRepo {
    url: GitRepoUrl,
    // Revision (e.g. commit hash) to download.
    rev: &'static str,
    // Expected hash of the downloaded sources (after the pinned lockfile has
    // been installed), as produced by `hash_dir` and formatted as 16 lowercase
    // hex digits. Used to detect stale or locally modified downloads.
    content_hash: &'static str,
    // Base name used to locate `<patch_name>-lock.toml` and the patch files
    // for this repo in the patches directory.
    patch_name: &'static str,
}
63
/// Where a [`GitRepo`] is hosted. Currently only Github is supported.
enum GitRepoUrl {
    Github { user: &'static str, repo: &'static str },
}
67
/// Hash the contents of a single file.
///
/// Note: This uses a hasher which is not cryptographically secure. This is
/// fine as the hash is meant to protect against accidental modification and
/// outdated downloads, not against manipulation.
fn hash_file(file: &std::path::Path) -> u64 {
    use std::hash::{Hash, Hasher};

    let bytes = std::fs::read(file).unwrap();
    #[allow(deprecated)]
    let mut state = std::hash::SipHasher::new();
    bytes.hash(&mut state);
    state.finish()
}
77
78 fn hash_dir(dir: &std::path::Path) -> u64 {
79 let mut sub_hashes = std::collections::BTreeMap::new();
80 for entry in std::fs::read_dir(dir).unwrap() {
81 let entry = entry.unwrap();
82 if entry.file_type().unwrap().is_dir() {
83 sub_hashes
84 .insert(entry.file_name().to_str().unwrap().to_owned(), hash_dir(&entry.path()));
85 } else {
86 sub_hashes
87 .insert(entry.file_name().to_str().unwrap().to_owned(), hash_file(&entry.path()));
88 }
89 }
90 #[allow(deprecated)]
91 let mut hasher = std::hash::SipHasher::new();
92 std::hash::Hash::hash(&sub_hashes, &mut hasher);
93 std::hash::Hasher::finish(&hasher)
94 }
95
96 impl GitRepo {
97 pub(crate) const fn github(
98 user: &'static str,
99 repo: &'static str,
100 rev: &'static str,
101 content_hash: &'static str,
102 patch_name: &'static str,
103 ) -> GitRepo {
104 GitRepo { url: GitRepoUrl::Github { user, repo }, rev, content_hash, patch_name }
105 }
106
107 fn download_dir(&self, dirs: &Dirs) -> PathBuf {
108 match self.url {
109 GitRepoUrl::Github { user: _, repo } => RelPath::DOWNLOAD.join(repo).to_path(dirs),
110 }
111 }
112
113 pub(crate) const fn source_dir(&self) -> RelPath {
114 match self.url {
115 GitRepoUrl::Github { user: _, repo } => RelPath::BUILD.join(repo),
116 }
117 }
118
119 pub(crate) fn fetch(&self, dirs: &Dirs) {
120 let download_dir = self.download_dir(dirs);
121
122 if download_dir.exists() {
123 let actual_hash = format!("{:016x}", hash_dir(&download_dir));
124 if actual_hash == self.content_hash {
125 eprintln!("[FRESH] {}", download_dir.display());
126 return;
127 } else {
128 eprintln!(
129 "Mismatched content hash for {download_dir}: {actual_hash} != {content_hash}. Downloading again.",
130 download_dir = download_dir.display(),
131 content_hash = self.content_hash,
132 );
133 }
134 }
135
136 match self.url {
137 GitRepoUrl::Github { user, repo } => {
138 clone_repo_shallow_github(dirs, &download_dir, user, repo, self.rev);
139 }
140 }
141
142 let source_lockfile =
143 RelPath::PATCHES.to_path(dirs).join(format!("{}-lock.toml", self.patch_name));
144 let target_lockfile = download_dir.join("Cargo.lock");
145 if source_lockfile.exists() {
146 assert!(!target_lockfile.exists());
147 fs::copy(source_lockfile, target_lockfile).unwrap();
148 } else {
149 assert!(target_lockfile.exists());
150 }
151
152 let actual_hash = format!("{:016x}", hash_dir(&download_dir));
153 if actual_hash != self.content_hash {
154 eprintln!(
155 "Download of {download_dir} failed with mismatched content hash: {actual_hash} != {content_hash}",
156 download_dir = download_dir.display(),
157 content_hash = self.content_hash,
158 );
159 std::process::exit(1);
160 }
161 }
162
163 pub(crate) fn patch(&self, dirs: &Dirs) {
164 apply_patches(
165 dirs,
166 self.patch_name,
167 &self.download_dir(dirs),
168 &self.source_dir().to_path(dirs),
169 );
170 }
171 }
172
173 #[allow(dead_code)]
174 fn clone_repo(download_dir: &Path, repo: &str, rev: &str) {
175 eprintln!("[CLONE] {}", repo);
176 // Ignore exit code as the repo may already have been checked out
177 git_command(None, "clone").arg(repo).arg(download_dir).spawn().unwrap().wait().unwrap();
178
179 let mut clean_cmd = git_command(download_dir, "checkout");
180 clean_cmd.arg("--").arg(".");
181 spawn_and_wait(clean_cmd);
182
183 let mut checkout_cmd = git_command(download_dir, "checkout");
184 checkout_cmd.arg("-q").arg(rev);
185 spawn_and_wait(checkout_cmd);
186
187 std::fs::remove_dir_all(download_dir.join(".git")).unwrap();
188 }
189
190 fn clone_repo_shallow_github(dirs: &Dirs, download_dir: &Path, user: &str, repo: &str, rev: &str) {
191 if cfg!(windows) {
192 // Older windows doesn't have tar or curl by default. Fall back to using git.
193 clone_repo(download_dir, &format!("https://github.com/{}/{}.git", user, repo), rev);
194 return;
195 }
196
197 let archive_url = format!("https://github.com/{}/{}/archive/{}.tar.gz", user, repo, rev);
198 let archive_file = RelPath::DOWNLOAD.to_path(dirs).join(format!("{}.tar.gz", rev));
199 let archive_dir = RelPath::DOWNLOAD.to_path(dirs).join(format!("{}-{}", repo, rev));
200
201 eprintln!("[DOWNLOAD] {}/{} from {}", user, repo, archive_url);
202
203 // Remove previous results if they exists
204 let _ = std::fs::remove_file(&archive_file);
205 let _ = std::fs::remove_dir_all(&archive_dir);
206 let _ = std::fs::remove_dir_all(&download_dir);
207
208 // Download zip archive
209 let mut download_cmd = Command::new("curl");
210 download_cmd
211 .arg("--max-time")
212 .arg("600")
213 .arg("-y")
214 .arg("30")
215 .arg("-Y")
216 .arg("10")
217 .arg("--connect-timeout")
218 .arg("30")
219 .arg("--continue-at")
220 .arg("-")
221 .arg("--location")
222 .arg("--output")
223 .arg(&archive_file)
224 .arg(archive_url);
225 retry_spawn_and_wait(5, download_cmd);
226
227 // Unpack tar archive
228 let mut unpack_cmd = Command::new("tar");
229 unpack_cmd.arg("xf").arg(&archive_file).current_dir(RelPath::DOWNLOAD.to_path(dirs));
230 spawn_and_wait(unpack_cmd);
231
232 // Rename unpacked dir to the expected name
233 std::fs::rename(archive_dir, &download_dir).unwrap();
234
235 // Cleanup
236 std::fs::remove_file(archive_file).unwrap();
237 }
238
239 fn init_git_repo(repo_dir: &Path) {
240 let mut git_init_cmd = git_command(repo_dir, "init");
241 git_init_cmd.arg("-q");
242 spawn_and_wait(git_init_cmd);
243
244 let mut git_add_cmd = git_command(repo_dir, "add");
245 git_add_cmd.arg(".");
246 spawn_and_wait(git_add_cmd);
247
248 let mut git_commit_cmd = git_command(repo_dir, "commit");
249 git_commit_cmd.arg("-m").arg("Initial commit").arg("-q");
250 spawn_and_wait(git_commit_cmd);
251 }
252
253 fn get_patches(dirs: &Dirs, crate_name: &str) -> Vec<PathBuf> {
254 let mut patches: Vec<_> = fs::read_dir(RelPath::PATCHES.to_path(dirs))
255 .unwrap()
256 .map(|entry| entry.unwrap().path())
257 .filter(|path| path.extension() == Some(OsStr::new("patch")))
258 .filter(|path| {
259 path.file_name()
260 .unwrap()
261 .to_str()
262 .unwrap()
263 .split_once("-")
264 .unwrap()
265 .1
266 .starts_with(crate_name)
267 })
268 .collect();
269 patches.sort();
270 patches
271 }
272
273 pub(crate) fn apply_patches(dirs: &Dirs, crate_name: &str, source_dir: &Path, target_dir: &Path) {
274 // FIXME avoid copy and patch if src, patches and target are unchanged
275
276 eprintln!("[COPY] {crate_name} source");
277
278 remove_dir_if_exists(target_dir);
279 fs::create_dir_all(target_dir).unwrap();
280 if crate_name == "stdlib" {
281 fs::create_dir(target_dir.join("library")).unwrap();
282 copy_dir_recursively(&source_dir.join("library"), &target_dir.join("library"));
283 } else {
284 copy_dir_recursively(source_dir, target_dir);
285 }
286
287 init_git_repo(target_dir);
288
289 if crate_name == "<none>" {
290 return;
291 }
292
293 for patch in get_patches(dirs, crate_name) {
294 eprintln!(
295 "[PATCH] {:?} <- {:?}",
296 target_dir.file_name().unwrap(),
297 patch.file_name().unwrap()
298 );
299 let mut apply_patch_cmd = git_command(target_dir, "am");
300 apply_patch_cmd.arg(patch).arg("-q");
301 spawn_and_wait(apply_patch_cmd);
302 }
303 }