# src/bootstrap/bootstrap.py (rustc 1.76.0)
from __future__ import absolute_import, division, print_function
import argparse
import contextlib
import datetime
import hashlib
import json
import os
import re
import shutil
import subprocess
import sys
import tarfile
import tempfile

from time import time
from multiprocessing import Pool, cpu_count

try:
    import lzma
except ImportError:
    lzma = None

def platform_is_win32():
    return sys.platform == 'win32'

if platform_is_win32():
    EXE_SUFFIX = ".exe"
else:
    EXE_SUFFIX = ""

def get_cpus():
    if hasattr(os, "sched_getaffinity"):
        return len(os.sched_getaffinity(0))
    if hasattr(os, "cpu_count"):
        cpus = os.cpu_count()
        if cpus is not None:
            return cpus
    try:
        return cpu_count()
    except NotImplementedError:
        return 1


def eprint(*args, **kwargs):
    kwargs["file"] = sys.stderr
    print(*args, **kwargs)


def get(base, url, path, checksums, verbose=False):
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        temp_path = temp_file.name

    try:
        if url not in checksums:
            raise RuntimeError(("src/stage0.json doesn't contain a checksum for {}. "
                                "Pre-built artifacts might not be available for this "
                                "target at this time, see https://doc.rust-lang.org/nightly"
                                "/rustc/platform-support.html for more information.")
                               .format(url))
        sha256 = checksums[url]
        if os.path.exists(path):
            if verify(path, sha256, False):
                if verbose:
                    eprint("using already-downloaded file", path)
                return
            else:
                if verbose:
                    eprint("ignoring already-downloaded file",
                           path, "due to failed verification")
                os.unlink(path)
        download(temp_path, "{}/{}".format(base, url), True, verbose)
        if not verify(temp_path, sha256, verbose):
            raise RuntimeError("failed verification")
        if verbose:
            eprint("moving {} to {}".format(temp_path, path))
        shutil.move(temp_path, path)
    finally:
        if os.path.isfile(temp_path):
            if verbose:
                eprint("removing", temp_path)
            os.unlink(temp_path)


def download(path, url, probably_big, verbose):
    for _ in range(4):
        try:
            _download(path, url, probably_big, verbose, True)
            return
        except RuntimeError:
            eprint("\nspurious failure, trying again")
    _download(path, url, probably_big, verbose, False)


def _download(path, url, probably_big, verbose, exception):
    # Try to use curl (potentially available on win32
    # https://devblogs.microsoft.com/commandline/tar-and-curl-come-to-windows/)
    # If an error occurs:
    # - If we are on win32 fallback to powershell
    # - Otherwise raise the error if appropriate
    if probably_big or verbose:
        eprint("downloading {}".format(url))

    try:
        if (probably_big or verbose) and "GITHUB_ACTIONS" not in os.environ:
            option = "-#"
        else:
            option = "-s"
        # If curl is not present on Win32, we should not sys.exit
        # but raise `CalledProcessError` or `OSError` instead
        require(["curl", "--version"], exception=platform_is_win32())
        run(["curl", option,
             "-L", # Follow redirect.
             "-y", "30", "-Y", "10", # timeout if speed is < 10 bytes/sec for > 30 seconds
             "--connect-timeout", "30", # timeout if cannot connect within 30 seconds
             "-o", path,
             "--retry", "3", "-SRf", url],
            verbose=verbose,
            exception=True, # Will raise RuntimeError on failure
        )
    except (subprocess.CalledProcessError, OSError, RuntimeError):
        # see http://serverfault.com/questions/301128/how-to-download
        if platform_is_win32():
            run_powershell([
                "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
                "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')".format(url, path)],
                verbose=verbose,
                exception=exception)
        # Check if the RuntimeError raised by run(curl) should be silenced
        elif verbose or exception:
            raise


def verify(path, expected, verbose):
    """Check if the sha256 sum of the given path is valid"""
    if verbose:
        eprint("verifying", path)
    with open(path, "rb") as source:
        found = hashlib.sha256(source.read()).hexdigest()
    verified = found == expected
    if not verified:
        eprint("invalid checksum:\n"
               "    found:    {}\n"
               "    expected: {}".format(found, expected))
    return verified

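# Editorial sketch (not part of upstream bootstrap.py): verify() above reads the
# whole artifact into memory, which is fine for stage0 tarballs. For very large
# files an equivalent chunked digest would look like the helper below; the
# helper name and chunk size are illustrative assumptions, and nothing calls it.
def _example_sha256_chunked(path, chunk_size=1 << 20):
    """Illustrative only: compute the same hex digest verify() compares,
    without loading the entire file at once."""
    digest = hashlib.sha256()
    with open(path, "rb") as source:
        for chunk in iter(lambda: source.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()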

def unpack(tarball, tarball_suffix, dst, verbose=False, match=None):
    """Unpack the given tarball file"""
    eprint("extracting", tarball)
    fname = os.path.basename(tarball).replace(tarball_suffix, "")
    with contextlib.closing(tarfile.open(tarball)) as tar:
        for member in tar.getnames():
            if "/" not in member:
                continue
            name = member.replace(fname + "/", "", 1)
            if match is not None and not name.startswith(match):
                continue
            name = name[len(match) + 1:]

            dst_path = os.path.join(dst, name)
            if verbose:
                eprint("  extracting", member)
            tar.extract(member, dst)
            src_path = os.path.join(dst, member)
            if os.path.isdir(src_path) and os.path.exists(dst_path):
                continue
            shutil.move(src_path, dst_path)
    shutil.rmtree(os.path.join(dst, fname))

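# Editorial note (not part of upstream): unpack() relies on the rust-dist
# tarball layout `<component>-<version>-<triple>/<subdir>/...`; the leading
# directory is stripped, and when `match` is given (e.g. "rustc" or "cargo")
# only entries under that subdirectory are moved into `dst`.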

def run(args, verbose=False, exception=False, is_bootstrap=False, **kwargs):
    """Run a child program in a new process"""
    if verbose:
        eprint("running: " + ' '.join(args))
    sys.stdout.flush()
    # Ensure that the .exe is used on Windows just in case a Linux ELF has been
    # compiled in the same directory.
    if os.name == 'nt' and not args[0].endswith('.exe'):
        args[0] += '.exe'
    # Use Popen here instead of call() as it apparently keeps powershell on
    # Windows from locking up waiting for input.
    ret = subprocess.Popen(args, **kwargs)
    code = ret.wait()
    if code != 0:
        err = "failed to run: " + ' '.join(args)
        if verbose or exception:
            raise RuntimeError(err)
        # For most failures, we definitely do want to print this error, or the user will have no
        # idea what went wrong. But when we've successfully built bootstrap and it failed, it will
        # have already printed an error above, so there's no need to print the exact command we're
        # running.
        if is_bootstrap:
            sys.exit(1)
        else:
            sys.exit(err)

def run_powershell(script, *args, **kwargs):
    """Run a powershell script"""
    run(["PowerShell.exe", "/nologo", "-Command"] + script, *args, **kwargs)


def require(cmd, exit=True, exception=False):
    '''Run a command, returning its output.
    On error:
    if `exception` is `True`, raise the error;
    otherwise, if `exit` is `True`, exit the process;
    else return None.'''
    try:
        return subprocess.check_output(cmd).strip()
    except (subprocess.CalledProcessError, OSError) as exc:
        if exception:
            raise
        elif exit:
            eprint("ERROR: unable to run `{}`: {}".format(' '.join(cmd), exc))
            eprint("Please make sure it's installed and in the path.")
            sys.exit(1)
        return None

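# Editorial illustration (not part of upstream): require() returns the raw
# bytes from check_output(), so callers below decode explicitly, e.g.
#
#     uname = require(["uname", "-smp"], exit=required)
#     kernel, cputype, processor = uname.decode(sys.getdefaultencoding()).split(maxsplit=2)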


def format_build_time(duration):
    """Return a nicer format for build time

    >>> format_build_time('300')
    '0:05:00'
    """
    return str(datetime.timedelta(seconds=int(duration)))


def default_build_triple(verbose):
    """Build triple as in LLVM"""
    # If we're on Windows and have an existing `rustc` toolchain, use `rustc --version --verbose`
    # to find our host target triple. This fixes an issue with Windows builds being detected
    # as GNU instead of MSVC.
    # Otherwise, detect it via `uname`
    default_encoding = sys.getdefaultencoding()

    if platform_is_win32():
        try:
            version = subprocess.check_output(["rustc", "--version", "--verbose"],
                                              stderr=subprocess.DEVNULL)
            version = version.decode(default_encoding)
            host = next(x for x in version.split('\n') if x.startswith("host: "))
            triple = host.split("host: ")[1]
            if verbose:
                eprint("detected default triple {} from pre-installed rustc".format(triple))
            return triple
        except Exception as e:
            if verbose:
                eprint("pre-installed rustc not detected: {}".format(e))
                eprint("falling back to auto-detect")

    required = not platform_is_win32()
    uname = require(["uname", "-smp"], exit=required)

    # If we do not have `uname`, assume Windows.
    if uname is None:
        return 'x86_64-pc-windows-msvc'

    kernel, cputype, processor = uname.decode(default_encoding).split(maxsplit=2)

    # The goal here is to come up with the same triple as LLVM would,
    # at least for the subset of platforms we're willing to target.
    kerneltype_mapper = {
        'Darwin': 'apple-darwin',
        'DragonFly': 'unknown-dragonfly',
        'FreeBSD': 'unknown-freebsd',
        'Haiku': 'unknown-haiku',
        'NetBSD': 'unknown-netbsd',
        'OpenBSD': 'unknown-openbsd',
        'GNU': 'unknown-hurd',
    }

    # Consider the direct transformation first and then the special cases
    if kernel in kerneltype_mapper:
        kernel = kerneltype_mapper[kernel]
    elif kernel == 'Linux':
        # Apple doesn't support `-o` so this can't be used in the combined
        # uname invocation above
        ostype = require(["uname", "-o"], exit=required).decode(default_encoding)
        if ostype == 'Android':
            kernel = 'linux-android'
        else:
            kernel = 'unknown-linux-gnu'
    elif kernel == 'SunOS':
        kernel = 'pc-solaris'
        # On Solaris, uname -m will return a machine classification instead
        # of a cpu type, so uname -p is recommended instead. However, the
        # output from that option is too generic for our purposes (it will
        # always emit 'i386' on x86/amd64 systems). As such, isainfo -k
        # must be used instead.
        cputype = require(['isainfo', '-k']).decode(default_encoding)
        # sparc cpus have sun as a target vendor
        if 'sparc' in cputype:
            kernel = 'sun-solaris'
    elif kernel.startswith('MINGW'):
        # msys' `uname` does not print the gcc configuration, only the msys
        # configuration, so we cannot trust `uname -m`:
        # msys1 is always i686 and msys2 is always x86_64.
        # Instead, msys defines $MSYSTEM, which is MINGW32 on i686 and
        # MINGW64 on x86_64.
        kernel = 'pc-windows-gnu'
        cputype = 'i686'
        if os.environ.get('MSYSTEM') == 'MINGW64':
            cputype = 'x86_64'
    elif kernel.startswith('MSYS'):
        kernel = 'pc-windows-gnu'
    elif kernel.startswith('CYGWIN_NT'):
        cputype = 'i686'
        if kernel.endswith('WOW64'):
            cputype = 'x86_64'
        kernel = 'pc-windows-gnu'
    elif platform_is_win32():
        # Some Windows platforms might have a `uname` command that returns a
        # non-standard string (e.g. gnuwin32 tools returns `windows32`). In
        # these cases, fall back to using sys.platform.
        return 'x86_64-pc-windows-msvc'
    elif kernel == 'AIX':
        # `uname -m` returns the machine ID rather than the machine hardware on AIX,
        # so we are unable to use cputype to form the triple. AIX 7.2 and
        # above support 32-bit and 64-bit mode simultaneously, and `uname -p`
        # returns `powerpc`; however, we only support `powerpc64-ibm-aix` in
        # Rust on AIX. For the above reasons, kerneltype_mapper and cputype_mapper
        # are not used to infer AIX's triple.
        return 'powerpc64-ibm-aix'
    else:
        err = "unknown OS type: {}".format(kernel)
        sys.exit(err)

    if cputype in ['powerpc', 'riscv'] and kernel == 'unknown-freebsd':
        cputype = subprocess.check_output(
            ['uname', '-p']).strip().decode(default_encoding)
    cputype_mapper = {
        'BePC': 'i686',
        'aarch64': 'aarch64',
        'aarch64eb': 'aarch64',
        'amd64': 'x86_64',
        'arm64': 'aarch64',
        'i386': 'i686',
        'i486': 'i686',
        'i686': 'i686',
        'i686-AT386': 'i686',
        'i786': 'i686',
        'loongarch64': 'loongarch64',
        'm68k': 'm68k',
        'csky': 'csky',
        'powerpc': 'powerpc',
        'powerpc64': 'powerpc64',
        'powerpc64le': 'powerpc64le',
        'ppc': 'powerpc',
        'ppc64': 'powerpc64',
        'ppc64le': 'powerpc64le',
        'riscv64': 'riscv64gc',
        's390x': 's390x',
        'x64': 'x86_64',
        'x86': 'i686',
        'x86-64': 'x86_64',
        'x86_64': 'x86_64'
    }

    # Consider the direct transformation first and then the special cases
    if cputype in cputype_mapper:
        cputype = cputype_mapper[cputype]
    elif cputype in {'xscale', 'arm'}:
        cputype = 'arm'
        if kernel == 'linux-android':
            kernel = 'linux-androideabi'
        elif kernel == 'unknown-freebsd':
            cputype = processor
            kernel = 'unknown-freebsd'
    elif cputype == 'armv6l':
        cputype = 'arm'
        if kernel == 'linux-android':
            kernel = 'linux-androideabi'
        else:
            kernel += 'eabihf'
    elif cputype in {'armv7l', 'armv8l'}:
        cputype = 'armv7'
        if kernel == 'linux-android':
            kernel = 'linux-androideabi'
        else:
            kernel += 'eabihf'
    elif cputype == 'mips':
        if sys.byteorder == 'big':
            cputype = 'mips'
        elif sys.byteorder == 'little':
            cputype = 'mipsel'
        else:
            raise ValueError("unknown byteorder: {}".format(sys.byteorder))
    elif cputype == 'mips64':
        if sys.byteorder == 'big':
            cputype = 'mips64'
        elif sys.byteorder == 'little':
            cputype = 'mips64el'
        else:
            raise ValueError('unknown byteorder: {}'.format(sys.byteorder))
        # only the n64 ABI is supported, indicate it
        kernel += 'abi64'
    elif cputype == 'sparc' or cputype == 'sparcv9' or cputype == 'sparc64':
        pass
    else:
        err = "unknown cpu type: {}".format(cputype)
        sys.exit(err)

    return "{}-{}".format(cputype, kernel)

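# Editorial illustration (not part of upstream): on a typical x86_64 Linux host
# the detection above yields "x86_64-unknown-linux-gnu"; on Apple Silicon it
# yields "aarch64-apple-darwin" ("arm64" is mapped to "aarch64" above).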

@contextlib.contextmanager
def output(filepath):
    tmp = filepath + '.tmp'
    with open(tmp, 'w') as f:
        yield f
    try:
        if os.path.exists(filepath):
            os.remove(filepath)  # PermissionError/OSError on Win32 if in use
    except OSError:
        shutil.copy2(tmp, filepath)
        os.remove(tmp)
        return
    os.rename(tmp, filepath)

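# Editorial illustration (not part of upstream): output() is used like a plain
# open()-for-write, but the target file is only replaced once the block has
# completed, as in the stamp write further below:
#
#     with output("some-stamp-file") as f:
#         f.write("contents")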

class Stage0Toolchain:
    def __init__(self, stage0_payload):
        self.date = stage0_payload["date"]
        self.version = stage0_payload["version"]

    def channel(self):
        return self.version + "-" + self.date

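# Editorial note (not part of upstream): the "compiler" object read from
# src/stage0.json has roughly the shape {"date": "YYYY-MM-DD", "version": "1.NN.N"},
# so channel() yields strings like "1.NN.N-YYYY-MM-DD".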

class DownloadInfo:
    """A helper class that can be pickled into a parallel subprocess"""

    def __init__(
        self,
        base_download_url,
        download_path,
        bin_root,
        tarball_path,
        tarball_suffix,
        checksums_sha256,
        pattern,
        verbose,
    ):
        self.base_download_url = base_download_url
        self.download_path = download_path
        self.bin_root = bin_root
        self.tarball_path = tarball_path
        self.tarball_suffix = tarball_suffix
        self.checksums_sha256 = checksums_sha256
        self.pattern = pattern
        self.verbose = verbose

def download_component(download_info):
    if not os.path.exists(download_info.tarball_path):
        get(
            download_info.base_download_url,
            download_info.download_path,
            download_info.tarball_path,
            download_info.checksums_sha256,
            verbose=download_info.verbose,
        )

def unpack_component(download_info):
    unpack(
        download_info.tarball_path,
        download_info.tarball_suffix,
        download_info.bin_root,
        match=download_info.pattern,
        verbose=download_info.verbose,
    )

class FakeArgs:
    """Used for unit tests to avoid updating all call sites"""
    def __init__(self):
        self.build = ''
        self.build_dir = ''
        self.clean = False
        self.verbose = False
        self.json_output = False
        self.color = 'auto'
        self.warnings = 'default'

class RustBuild(object):
    """Provide all the methods required to build Rust"""
    def __init__(self, config_toml="", args=None):
        if args is None:
            args = FakeArgs()
        self.git_version = None
        self.nix_deps_dir = None
        self._should_fix_bins_and_dylibs = None
        self.rust_root = os.path.abspath(os.path.join(__file__, '../../..'))

        self.config_toml = config_toml

        self.clean = args.clean
        self.json_output = args.json_output
        self.verbose = args.verbose
        self.color = args.color
        self.warnings = args.warnings

        config_verbose_count = self.get_toml('verbose', 'build')
        if config_verbose_count is not None:
            self.verbose = max(self.verbose, int(config_verbose_count))

        self.use_vendored_sources = self.get_toml('vendor', 'build') == 'true'
        self.use_locked_deps = self.get_toml('locked-deps', 'build') == 'true'

        build_dir = args.build_dir or self.get_toml('build-dir', 'build') or 'build'
        self.build_dir = os.path.abspath(build_dir)

        with open(os.path.join(self.rust_root, "src", "stage0.json")) as f:
            data = json.load(f)
        self.checksums_sha256 = data["checksums_sha256"]
        self.stage0_compiler = Stage0Toolchain(data["compiler"])
        self.download_url = os.getenv("RUSTUP_DIST_SERVER") or data["config"]["dist_server"]

        self.build = args.build or self.build_triple()


    def download_toolchain(self):
        """Fetch the build system for Rust, written in Rust

        This method creates a cache directory, then fetches the tarballs
        containing the stage0 compiler that is used to bootstrap the Rust
        compiler itself.

        Each downloaded tarball is extracted; after that, the script
        moves all the content to the right place.
        """
        rustc_channel = self.stage0_compiler.version
        bin_root = self.bin_root()

        key = self.stage0_compiler.date
        if self.rustc().startswith(bin_root) and \
                (not os.path.exists(self.rustc()) or
                 self.program_out_of_date(self.rustc_stamp(), key)):
            if os.path.exists(bin_root):
                # HACK: On Windows, we can't delete rust-analyzer-proc-macro-server while it's
                # running. Kill it.
                if platform_is_win32():
                    print("Killing rust-analyzer-proc-macro-srv before deleting stage0 toolchain")
                    regex = '{}\\\\(host|{})\\\\stage0\\\\libexec'.format(
                        os.path.basename(self.build_dir),
                        self.build
                    )
                    script = (
                        # NOTE: can't use `taskkill` or `Get-Process -Name` because they error if
                        # the server isn't running.
                        'Get-Process | ' +
                        'Where-Object {$_.Name -eq "rust-analyzer-proc-macro-srv"} |' +
                        'Where-Object {{$_.Path -match "{}"}} |'.format(regex) +
                        'Stop-Process'
                    )
                    run_powershell([script])
                shutil.rmtree(bin_root)

            key = self.stage0_compiler.date
            cache_dst = os.path.join(self.build_dir, "cache")
            rustc_cache = os.path.join(cache_dst, key)
            if not os.path.exists(rustc_cache):
                os.makedirs(rustc_cache)

            tarball_suffix = '.tar.gz' if lzma is None else '.tar.xz'

            toolchain_suffix = "{}-{}{}".format(rustc_channel, self.build, tarball_suffix)

            tarballs_to_download = [
                ("rust-std-{}".format(toolchain_suffix), "rust-std-{}".format(self.build)),
                ("rustc-{}".format(toolchain_suffix), "rustc"),
                ("cargo-{}".format(toolchain_suffix), "cargo"),
            ]

            tarballs_download_info = [
                DownloadInfo(
                    base_download_url=self.download_url,
                    download_path="dist/{}/{}".format(self.stage0_compiler.date, filename),
                    bin_root=self.bin_root(),
                    tarball_path=os.path.join(rustc_cache, filename),
                    tarball_suffix=tarball_suffix,
                    checksums_sha256=self.checksums_sha256,
                    pattern=pattern,
                    verbose=self.verbose,
                )
                for filename, pattern in tarballs_to_download
            ]

            # Download the components serially to show the progress bars properly.
            for download_info in tarballs_download_info:
                download_component(download_info)

            # Unpack the tarballs in parallel.
            # In Python 2.7, Pool cannot be used as a context manager.
            pool_size = min(len(tarballs_download_info), get_cpus())
            if self.verbose:
                print('Choosing a pool size of', pool_size, 'for the unpacking of the tarballs')
            p = Pool(pool_size)
            try:
                p.map(unpack_component, tarballs_download_info)
            finally:
                p.close()
                p.join()

            if self.should_fix_bins_and_dylibs():
                self.fix_bin_or_dylib("{}/bin/cargo".format(bin_root))

                self.fix_bin_or_dylib("{}/bin/rustc".format(bin_root))
                self.fix_bin_or_dylib("{}/bin/rustdoc".format(bin_root))
                self.fix_bin_or_dylib("{}/libexec/rust-analyzer-proc-macro-srv".format(bin_root))
                lib_dir = "{}/lib".format(bin_root)
                for lib in os.listdir(lib_dir):
                    if lib.endswith(".so"):
                        self.fix_bin_or_dylib(os.path.join(lib_dir, lib))

            with output(self.rustc_stamp()) as rust_stamp:
                rust_stamp.write(key)

    def should_fix_bins_and_dylibs(self):
        """Whether or not `fix_bin_or_dylib` needs to be run; can only be True
        on NixOS or if config.toml has `build.patch-binaries-for-nix` set.
        """
        if self._should_fix_bins_and_dylibs is not None:
            return self._should_fix_bins_and_dylibs

        def get_answer():
            default_encoding = sys.getdefaultencoding()
            try:
                ostype = subprocess.check_output(
                    ['uname', '-s']).strip().decode(default_encoding)
            except subprocess.CalledProcessError:
                return False
            except OSError as reason:
                if getattr(reason, 'winerror', None) is not None:
                    return False
                raise reason

            if ostype != "Linux":
                return False

            # If the user has explicitly indicated whether binaries should be
            # patched for Nix, then don't check for NixOS.
            if self.get_toml("patch-binaries-for-nix", "build") == "true":
                return True
            if self.get_toml("patch-binaries-for-nix", "build") == "false":
                return False

            # Use `/etc/os-release` instead of `/etc/NIXOS`.
            # The latter one does not exist on NixOS when using tmpfs as root.
            try:
                with open("/etc/os-release", "r") as f:
                    is_nixos = any(ln.strip() in ("ID=nixos", "ID='nixos'", 'ID="nixos"')
                                   for ln in f)
            except FileNotFoundError:
                is_nixos = False

            # If not on NixOS, then warn if user seems to be atop Nix shell
            if not is_nixos:
                in_nix_shell = os.getenv('IN_NIX_SHELL')
                if in_nix_shell:
                    eprint("The IN_NIX_SHELL environment variable is `{}`;".format(in_nix_shell),
                           "you may need to set `patch-binaries-for-nix=true` in config.toml")

            return is_nixos

        answer = self._should_fix_bins_and_dylibs = get_answer()
        if answer:
            eprint("INFO: You seem to be using Nix.")
        return answer

    def fix_bin_or_dylib(self, fname):
        """Modifies the interpreter section of 'fname' to fix the dynamic linker,
        or the RPATH section, to fix the dynamic library search path

        This method is only required on NixOS and uses the PatchELF utility to
        change the interpreter/RPATH of ELF executables.

        Please see https://nixos.org/patchelf.html for more information
        """
        assert self._should_fix_bins_and_dylibs is True
        eprint("attempting to patch", fname)

        # Only build `.nix-deps` once.
        nix_deps_dir = self.nix_deps_dir
        if not nix_deps_dir:
            # Run `nix-build` to "build" each dependency (which will likely reuse
            # the existing `/nix/store` copy, or at most download a pre-built copy).
            #
            # Importantly, we create a gc-root called `.nix-deps` in the `build/`
            # directory, but still reference the actual `/nix/store` path in the rpath,
            # as that is significantly more robust against changes to the location of
            # `.nix-deps`.
            #
            # bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`).
            # zlib: Needed as a system dependency of `libLLVM-*.so`.
            # patchelf: Needed for patching ELF binaries (see doc comment above).
            nix_deps_dir = "{}/{}".format(self.build_dir, ".nix-deps")
            nix_expr = '''
            with (import <nixpkgs> {});
            symlinkJoin {
              name = "rust-stage0-dependencies";
              paths = [
                zlib
                patchelf
                stdenv.cc.bintools
              ];
            }
            '''
            try:
                subprocess.check_output([
                    "nix-build", "-E", nix_expr, "-o", nix_deps_dir,
                ])
            except subprocess.CalledProcessError as reason:
                eprint("WARNING: failed to call nix-build:", reason)
                return
            self.nix_deps_dir = nix_deps_dir

        patchelf = "{}/bin/patchelf".format(nix_deps_dir)
        rpath_entries = [
            # Relative default, all binary and dynamic libraries we ship
            # appear to have this (even when `../lib` is redundant).
            "$ORIGIN/../lib",
            os.path.join(os.path.realpath(nix_deps_dir), "lib")
        ]
        patchelf_args = ["--set-rpath", ":".join(rpath_entries)]
        if not fname.endswith(".so"):
            # Finally, set the correct .interp for binaries
            with open("{}/nix-support/dynamic-linker".format(nix_deps_dir)) as dynamic_linker:
                patchelf_args += ["--set-interpreter", dynamic_linker.read().rstrip()]

        try:
            subprocess.check_output([patchelf] + patchelf_args + [fname])
        except subprocess.CalledProcessError as reason:
            eprint("WARNING: failed to call patchelf:", reason)
            return

    def rustc_stamp(self):
        """Return the path for .rustc-stamp at the given stage

        >>> rb = RustBuild()
        >>> rb.build = "host"
        >>> rb.build_dir = "build"
        >>> expected = os.path.join("build", "host", "stage0", ".rustc-stamp")
        >>> assert rb.rustc_stamp() == expected, rb.rustc_stamp()
        """
        return os.path.join(self.bin_root(), '.rustc-stamp')

    def program_out_of_date(self, stamp_path, key):
        """Check if the given program stamp is out of date"""
        if not os.path.exists(stamp_path) or self.clean:
            return True
        with open(stamp_path, 'r') as stamp:
            return key != stamp.read()

    def bin_root(self):
        """Return the binary root directory for the given stage

        >>> rb = RustBuild()
        >>> rb.build = "devel"
        >>> expected = os.path.abspath(os.path.join("build", "devel", "stage0"))
        >>> assert rb.bin_root() == expected, rb.bin_root()
        """
        subdir = "stage0"
        return os.path.join(self.build_dir, self.build, subdir)

    def get_toml(self, key, section=None):
        """Returns the value of the given key in config.toml, otherwise returns None

        >>> rb = RustBuild()
        >>> rb.config_toml = 'key1 = "value1"\\nkey2 = "value2"'
        >>> rb.get_toml("key2")
        'value2'

        If the key does not exist, the result is None:

        >>> rb.get_toml("key3") is None
        True

        Optionally also matches the section the key appears in

        >>> rb.config_toml = '[a]\\nkey = "value1"\\n[b]\\nkey = "value2"'
        >>> rb.get_toml('key', 'a')
        'value1'
        >>> rb.get_toml('key', 'b')
        'value2'
        >>> rb.get_toml('key', 'c') is None
        True

        >>> rb.config_toml = 'key1 = true'
        >>> rb.get_toml("key1")
        'true'
        """
        return RustBuild.get_toml_static(self.config_toml, key, section)

    @staticmethod
    def get_toml_static(config_toml, key, section=None):
        cur_section = None
        for line in config_toml.splitlines():
            section_match = re.match(r'^\s*\[(.*)\]\s*$', line)
            if section_match is not None:
                cur_section = section_match.group(1)

            match = re.match(r'^{}\s*=(.*)$'.format(key), line)
            if match is not None:
                value = match.group(1)
                if section is None or section == cur_section:
                    return RustBuild.get_string(value) or value.strip()
        return None

    def cargo(self):
        """Return config path for cargo"""
        return self.program_config('cargo')

    def rustc(self):
        """Return config path for rustc"""
        return self.program_config('rustc')

    def program_config(self, program):
        """Return config path for the given program at the given stage

        >>> rb = RustBuild()
        >>> rb.config_toml = 'rustc = "rustc"\\n'
        >>> rb.program_config('rustc')
        'rustc'
        >>> rb.config_toml = ''
        >>> cargo_path = rb.program_config('cargo')
        >>> cargo_path.rstrip(".exe") == os.path.join(rb.bin_root(),
        ... "bin", "cargo")
        True
        """
        config = self.get_toml(program)
        if config:
            return os.path.expanduser(config)
        return os.path.join(self.bin_root(), "bin", "{}{}".format(program, EXE_SUFFIX))

    @staticmethod
    def get_string(line):
        """Return the value between double quotes

        >>> RustBuild.get_string(' "devel" ')
        'devel'
        >>> RustBuild.get_string(" 'devel' ")
        'devel'
        >>> RustBuild.get_string('devel') is None
        True
        >>> RustBuild.get_string(' "devel ')
        ''
        """
        start = line.find('"')
        if start != -1:
            end = start + 1 + line[start + 1:].find('"')
            return line[start + 1:end]
        start = line.find('\'')
        if start != -1:
            end = start + 1 + line[start + 1:].find('\'')
            return line[start + 1:end]
        return None

    def bootstrap_binary(self):
        """Return the path of the bootstrap binary

        >>> rb = RustBuild()
        >>> rb.build_dir = "build"
        >>> rb.bootstrap_binary() == os.path.join("build", "bootstrap",
        ... "debug", "bootstrap")
        True
        """
        return os.path.join(self.build_dir, "bootstrap", "debug", "bootstrap")

    def build_bootstrap(self):
        """Build bootstrap"""
        env = os.environ.copy()
        if "GITHUB_ACTIONS" in env:
            print("::group::Building bootstrap")
        else:
            eprint("Building bootstrap")

        args = self.build_bootstrap_cmd(env)
        # Run this from the source directory so cargo finds .cargo/config
        run(args, env=env, verbose=self.verbose, cwd=self.rust_root)

        if "GITHUB_ACTIONS" in env:
            print("::endgroup::")

    def build_bootstrap_cmd(self, env):
        """For tests."""
        build_dir = os.path.join(self.build_dir, "bootstrap")
        if self.clean and os.path.exists(build_dir):
            shutil.rmtree(build_dir)
        # `CARGO_BUILD_TARGET` breaks bootstrap build.
        # See also: <https://github.com/rust-lang/rust/issues/70208>.
        if "CARGO_BUILD_TARGET" in env:
            del env["CARGO_BUILD_TARGET"]
        env["CARGO_TARGET_DIR"] = build_dir
        env["RUSTC"] = self.rustc()
        env["LD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
            (os.pathsep + env["LD_LIBRARY_PATH"]) \
            if "LD_LIBRARY_PATH" in env else ""
        env["DYLD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
            (os.pathsep + env["DYLD_LIBRARY_PATH"]) \
            if "DYLD_LIBRARY_PATH" in env else ""
        env["LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
            (os.pathsep + env["LIBRARY_PATH"]) \
            if "LIBRARY_PATH" in env else ""
        env["LIBPATH"] = os.path.join(self.bin_root(), "lib") + \
            (os.pathsep + env["LIBPATH"]) \
            if "LIBPATH" in env else ""

        # Export Stage0 snapshot compiler related env variables
        build_section = "target.{}".format(self.build)
        host_triple_sanitized = self.build.replace("-", "_")
        var_data = {
            "CC": "cc", "CXX": "cxx", "LD": "linker", "AR": "ar", "RANLIB": "ranlib"
        }
        for var_name, toml_key in var_data.items():
            toml_val = self.get_toml(toml_key, build_section)
            if toml_val is not None:
                env["{}_{}".format(var_name, host_triple_sanitized)] = toml_val

        # preserve existing RUSTFLAGS
        env.setdefault("RUSTFLAGS", "")

        target_features = []
        if self.get_toml("crt-static", build_section) == "true":
            target_features += ["+crt-static"]
        elif self.get_toml("crt-static", build_section) == "false":
            target_features += ["-crt-static"]
        if target_features:
            env["RUSTFLAGS"] += " -C target-feature=" + (",".join(target_features))
        target_linker = self.get_toml("linker", build_section)
        if target_linker is not None:
            env["RUSTFLAGS"] += " -C linker=" + target_linker
        # When changing this list, also update the corresponding list in `Builder::cargo`
        # in `src/bootstrap/src/core/builder.rs`.
        env["RUSTFLAGS"] += " -Wrust_2018_idioms -Wunused_lifetimes"
        if self.warnings == "default":
            deny_warnings = self.get_toml("deny-warnings", "rust") != "false"
        else:
            deny_warnings = self.warnings == "deny"
        if deny_warnings:
            env["RUSTFLAGS"] += " -Dwarnings"

        # Add RUSTFLAGS_BOOTSTRAP to RUSTFLAGS for bootstrap compilation.
        # Note that RUSTFLAGS_BOOTSTRAP should always be added to the end of
        # RUSTFLAGS to be actually effective (e.g., if we have `-Dwarnings` in
        # RUSTFLAGS, passing `-Awarnings` from RUSTFLAGS_BOOTSTRAP should override it).
        if "RUSTFLAGS_BOOTSTRAP" in env:
            env["RUSTFLAGS"] += " " + env["RUSTFLAGS_BOOTSTRAP"]

        env["PATH"] = os.path.join(self.bin_root(), "bin") + \
            os.pathsep + env["PATH"]
        if not os.path.isfile(self.cargo()):
            raise Exception("no cargo executable found at `{}`".format(
                self.cargo()))
        args = [self.cargo(), "build", "--manifest-path",
                os.path.join(self.rust_root, "src/bootstrap/Cargo.toml")]
        args.extend("--verbose" for _ in range(self.verbose))
        if self.use_locked_deps:
            args.append("--locked")
        if self.use_vendored_sources:
            args.append("--frozen")
        if self.get_toml("metrics", "build"):
            args.append("--features")
            args.append("build-metrics")
        if self.json_output:
            args.append("--message-format=json")
        if self.color == "always":
            args.append("--color=always")
        elif self.color == "never":
            args.append("--color=never")
        try:
            args += env["CARGOFLAGS"].split()
        except KeyError:
            pass

        return args

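    # Editorial illustration (not part of upstream): on a typical Linux host the
    # command assembled by build_bootstrap_cmd() boils down to roughly
    #
    #     <bin_root>/bin/cargo build --manifest-path <rust_root>/src/bootstrap/Cargo.toml
    #
    # plus any --locked/--frozen/--verbose/--color flags derived from config.toml
    # and the command line, with RUSTC, RUSTFLAGS and PATH pointed at the stage0 toolchain.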
    def build_triple(self):
        """Build triple as in LLVM

        Note that `default_build_triple` is moderately expensive,
        so use `self.build` where possible.
        """
        config = self.get_toml('build')
        return config or default_build_triple(self.verbose)

    def check_vendored_status(self):
        """Check that vendoring is configured properly"""
        # keep this consistent with the equivalent check in rustbuild:
        # https://github.com/rust-lang/rust/blob/a8a33cf27166d3eabaffc58ed3799e054af3b0c6/src/bootstrap/lib.rs#L399-L405
        if 'SUDO_USER' in os.environ and not self.use_vendored_sources:
            if os.getuid() == 0:
                self.use_vendored_sources = True
                eprint('INFO: looks like you\'re trying to run this command as root')
                eprint('      and so in order to preserve your $HOME this will now')
                eprint('      use vendored sources by default.')

        cargo_dir = os.path.join(self.rust_root, '.cargo')
        if self.use_vendored_sources:
            vendor_dir = os.path.join(self.rust_root, 'vendor')
            if not os.path.exists(vendor_dir):
                sync_dirs = "--sync ./src/tools/cargo/Cargo.toml " \
                            "--sync ./src/tools/rust-analyzer/Cargo.toml " \
                            "--sync ./compiler/rustc_codegen_cranelift/Cargo.toml " \
                            "--sync ./src/bootstrap/Cargo.toml "
                eprint('ERROR: vendoring required, but vendor directory does not exist.')
                eprint('       Run `cargo vendor {}` to initialize the '
                       'vendor directory.'.format(sync_dirs))
                eprint('Alternatively, use the pre-vendored `rustc-src` dist component.')
                raise Exception("{} not found".format(vendor_dir))

            if not os.path.exists(cargo_dir):
                eprint('ERROR: vendoring required, but .cargo/config does not exist.')
                raise Exception("{} not found".format(cargo_dir))
        else:
            if os.path.exists(cargo_dir):
                shutil.rmtree(cargo_dir)

def parse_args(args):
    """Parse the command line arguments that the python script needs."""
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('-h', '--help', action='store_true')
    parser.add_argument('--config')
    parser.add_argument('--build-dir')
    parser.add_argument('--build')
    parser.add_argument('--color', choices=['always', 'never', 'auto'])
    parser.add_argument('--clean', action='store_true')
    parser.add_argument('--json-output', action='store_true')
    parser.add_argument('--warnings', choices=['deny', 'warn', 'default'], default='default')
    parser.add_argument('-v', '--verbose', action='count', default=0)

    return parser.parse_known_args(args)[0]

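# Editorial note (not part of upstream): parse_known_args() is used because only
# a handful of flags matter to this Python shim; the full original command line
# (e.g. `./x.py build library --stage 1`) is still forwarded untouched to the
# compiled bootstrap binary at the end of bootstrap() below.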
def bootstrap(args):
    """Configure, fetch, build and run the initial bootstrap"""
    rust_root = os.path.abspath(os.path.join(__file__, '../../..'))

    if not os.path.exists(os.path.join(rust_root, '.git')) and \
            os.path.exists(os.path.join(rust_root, '.github')):
        eprint("warn: Looks like you are trying to bootstrap Rust from a source that is neither a "
               "git clone nor a distributed tarball.\nThis build may fail due to missing submodules "
               "unless you put them in place manually.")

    # Read from `--config`, then `RUST_BOOTSTRAP_CONFIG`, then `./config.toml`,
    # then `config.toml` in the root directory.
    toml_path = args.config or os.getenv('RUST_BOOTSTRAP_CONFIG')
    using_default_path = toml_path is None
    if using_default_path:
        toml_path = 'config.toml'
        if not os.path.exists(toml_path):
            toml_path = os.path.join(rust_root, toml_path)

    # Give a hard error if `--config` or `RUST_BOOTSTRAP_CONFIG` are set to a missing path,
    # but not if `config.toml` hasn't been created.
    if not using_default_path or os.path.exists(toml_path):
        with open(toml_path) as config:
            config_toml = config.read()
    else:
        config_toml = ''

    profile = RustBuild.get_toml_static(config_toml, 'profile')
    if profile is not None:
        # Allow creating aliases for profile names, so profiles can be renamed
        # while maintaining backwards compatibility.
        # Keep in sync with `profile_aliases` in config.rs
        profile_aliases = {
            "user": "dist"
        }
        include_file = 'config.{}.toml'.format(profile_aliases.get(profile) or profile)
        include_dir = os.path.join(rust_root, 'src', 'bootstrap', 'defaults')
        include_path = os.path.join(include_dir, include_file)

        if not os.path.exists(include_path):
            raise Exception("Unrecognized config profile '{}'. Check src/bootstrap/defaults"
                            " for available options.".format(profile))

        # HACK: This works because `self.get_toml()` returns the first match it finds for a
        # specific key, so appending our defaults at the end allows the user to override them
        with open(include_path) as included_toml:
            config_toml += os.linesep + included_toml.read()

    # Configure initial bootstrap
    build = RustBuild(config_toml, args)
    build.check_vendored_status()

    if not os.path.exists(build.build_dir):
        os.makedirs(build.build_dir)

    # Fetch/build the bootstrap
    build.download_toolchain()
    sys.stdout.flush()
    build.build_bootstrap()
    sys.stdout.flush()

    # Run the bootstrap
    args = [build.bootstrap_binary()]
    args.extend(sys.argv[1:])
    env = os.environ.copy()
    env["BOOTSTRAP_PARENT_ID"] = str(os.getpid())
    env["BOOTSTRAP_PYTHON"] = sys.executable
    run(args, env=env, verbose=build.verbose, is_bootstrap=True)


def main():
    """Entry point for the bootstrap process"""
    start_time = time()

    # x.py help <cmd> ...
    if len(sys.argv) > 1 and sys.argv[1] == 'help':
        sys.argv[1] = '-h'

    args = parse_args(sys.argv)
    help_triggered = args.help or len(sys.argv) == 1

    # If the user is asking for help, let them know that the whole download-and-build
    # process has to happen before anything is printed out.
    if help_triggered:
        eprint(
            "INFO: Downloading and building bootstrap before processing --help command.\n"
            "      See src/bootstrap/README.md for help with common commands.")

    exit_code = 0
    success_word = "successfully"
    try:
        bootstrap(args)
    except (SystemExit, KeyboardInterrupt) as error:
        if hasattr(error, 'code') and isinstance(error.code, int):
            exit_code = error.code
        else:
            exit_code = 1
            eprint(error)
        success_word = "unsuccessfully"

    if not help_triggered:
        eprint("Build completed", success_word, "in", format_build_time(time() - start_time))
    sys.exit(exit_code)


if __name__ == '__main__':
    main()