#!/usr/bin/env python
# rustc bootstrap driver (src/bootstrap/bootstrap.py)
from __future__ import absolute_import, division, print_function
import argparse
import contextlib
import datetime
import distutils.version
import hashlib
import json
import os
import re
import shutil
import subprocess
import sys
import tarfile
import tempfile
from time import time, sleep
# Acquire a lock on the build directory to make sure that
# we don't cause a race condition while building
# Lock is created in `build_dir/lock.db`
def acquire_lock(build_dir):
    """Take an exclusive lock on `build_dir/lock.db`.

    Returns the sqlite cursor holding the lock (the lock is released when the
    cursor is garbage-collected), or None when sqlite3 is unavailable.
    """
    try:
        import sqlite3

        path = os.path.join(build_dir, "lock.db")
        try:
            # timeout=0: fail immediately if another invocation holds the lock
            con = sqlite3.Connection(path, timeout=0)
            curs = con.cursor()
            curs.execute("BEGIN EXCLUSIVE")
            # The lock is released when the cursor is dropped
            return curs
        # If the database is busy then lock has already been acquired
        # so we wait for the lock.
        # We retry every quarter second so that execution is passed back to python
        # so that it can handle signals
        except sqlite3.OperationalError:
            del con
            del curs
            print("Waiting for lock on build directory")
            con = sqlite3.Connection(path, timeout=0.25)
            while True:
                try:
                    curs = con.cursor()
                    curs.execute("BEGIN EXCLUSIVE")
                    break
                except sqlite3.OperationalError:
                    pass
                sleep(0.25)
            return curs
    except ImportError:
        print("warning: sqlite3 not available in python, skipping build directory lock")
        print("please file an issue on rust-lang/rust")
        print("this is not a problem for non-concurrent x.py invocations")
        return None
def support_xz():
    """Return True when this Python's tarfile can write xz (lzma) archives."""
    try:
        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
            temp_path = temp_file.name
        with tarfile.open(temp_path, "w:xz"):
            pass
        return True
    except tarfile.CompressionError:
        # Raised when the lzma module is unavailable in this interpreter.
        return False
def get(base, url, path, checksums, verbose=False):
    """Download `base`/`url` to `path`, verifying it against `checksums`.

    Reuses an already-downloaded file when its sha256 matches; the temporary
    download file is always cleaned up in the `finally` block.
    """
    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        temp_path = temp_file.name

    try:
        if url not in checksums:
            raise RuntimeError(("src/stage0.json doesn't contain a checksum for {}. "
                                "Pre-built artifacts might not be available for this "
                                "target at this time, see https://doc.rust-lang.org/nightly"
                                "/rustc/platform-support.html for more information.")
                               .format(url))
        sha256 = checksums[url]
        if os.path.exists(path):
            if verify(path, sha256, False):
                if verbose:
                    print("using already-download file", path)
                return
            else:
                if verbose:
                    print("ignoring already-download file",
                          path, "due to failed verification")
                os.unlink(path)
        download(temp_path, "{}/{}".format(base, url), True, verbose)
        if not verify(temp_path, sha256, verbose):
            raise RuntimeError("failed verification")
        if verbose:
            print("moving {} to {}".format(temp_path, path))
        shutil.move(temp_path, path)
    finally:
        if os.path.isfile(temp_path):
            if verbose:
                print("removing", temp_path)
            os.unlink(temp_path)
def download(path, url, probably_big, verbose):
    """Download `url` to `path`, retrying up to 4 times on spurious failures.

    The final attempt passes exception=False so the process exits with the
    underlying error message instead of a Python traceback.
    """
    for _ in range(0, 4):
        try:
            _download(path, url, probably_big, verbose, True)
            return
        except RuntimeError:
            print("\nspurious failure, trying again")
    _download(path, url, probably_big, verbose, False)
def _download(path, url, probably_big, verbose, exception):
    """Fetch `url` into `path` with curl, falling back to PowerShell on win32.

    `exception=True` turns failures into raised errors instead of exits.
    """
    # Try to use curl (potentially available on win32
    #    https://devblogs.microsoft.com/commandline/tar-and-curl-come-to-windows/)
    # If an error occurs:
    #  - If we are on win32 fallback to powershell
    #  - Otherwise raise the error if appropriate
    if probably_big or verbose:
        print("downloading {}".format(url))

    platform_is_win32 = sys.platform == 'win32'
    try:
        if probably_big or verbose:
            option = "-#"
        else:
            option = "-s"
        # If curl is not present on Win32, we shoud not sys.exit
        #   but raise `CalledProcessError` or `OSError` instead
        require(["curl", "--version"], exception=platform_is_win32)
        run(["curl", option,
             "-L",  # Follow redirect.
             "-y", "30", "-Y", "10",    # timeout if speed is < 10 bytes/sec for > 30 seconds
             "--connect-timeout", "30",  # timeout if cannot connect within 30 seconds
             "--retry", "3", "-Sf", "-o", path, url],
            verbose=verbose,
            exception=True,  # Will raise RuntimeError on failure
            )
    except (subprocess.CalledProcessError, OSError, RuntimeError):
        # see http://serverfault.com/questions/301128/how-to-download
        if platform_is_win32:
            run(["PowerShell.exe", "/nologo", "-Command",
                 "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
                 "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')".format(url, path)],
                verbose=verbose,
                exception=exception)
        # Check if the RuntimeError raised by run(curl) should be silenced
        elif verbose or exception:
            raise
def verify(path, expected, verbose):
    """Check if the sha256 sum of the given path is valid"""
    if verbose:
        print("verifying", path)
    with open(path, "rb") as source:
        found = hashlib.sha256(source.read()).hexdigest()
    verified = found == expected
    if not verified:
        print("invalid checksum:\n"
              "    found:    {}\n"
              "    expected: {}".format(found, expected))
    return verified
def unpack(tarball, tarball_suffix, dst, verbose=False, match=None):
    """Unpack the given tarball file

    Strips the top-level `<name>-<version>-<triple>/` directory and, when
    `match` is given, only extracts entries under that component directory,
    re-rooting them directly into `dst`.
    NOTE(review): callers always pass `match`; with match=None the
    `len(match)` below would raise — preserved from the original.
    """
    print("extracting", tarball)
    fname = os.path.basename(tarball).replace(tarball_suffix, "")
    with contextlib.closing(tarfile.open(tarball)) as tar:
        for member in tar.getnames():
            if "/" not in member:
                continue
            name = member.replace(fname + "/", "", 1)
            if match is not None and not name.startswith(match):
                continue
            name = name[len(match) + 1:]

            dst_path = os.path.join(dst, name)
            if verbose:
                print("  extracting", member)
            tar.extract(member, dst)
            src_path = os.path.join(dst, member)
            if os.path.isdir(src_path) and os.path.exists(dst_path):
                continue
            shutil.move(src_path, dst_path)
    shutil.rmtree(os.path.join(dst, fname))
def run(args, verbose=False, exception=False, is_bootstrap=False, **kwargs):
    """Run a child program in a new process"""
    if verbose:
        print("running: " + ' '.join(args))
    sys.stdout.flush()
    # Use Popen here instead of call() as it apparently allows powershell on
    # Windows to not lock up waiting for input presumably.
    ret = subprocess.Popen(args, **kwargs)
    code = ret.wait()
    if code != 0:
        err = "failed to run: " + ' '.join(args)
        if verbose or exception:
            raise RuntimeError(err)
        # For most failures, we definitely do want to print this error, or the user will have no
        # idea what went wrong. But when we've successfully built bootstrap and it failed, it will
        # have already printed an error above, so there's no need to print the exact command we're
        # running.
        if is_bootstrap:
            sys.exit(1)
        else:
            sys.exit(err)
def require(cmd, exit=True, exception=False):
    '''Run a command, returning its output.
    And optionally:
        If `exception` is `True`, raise the error
        Otherwise If `exit` is `True`, exit the process
        Otherwise return None'''
    try:
        return subprocess.check_output(cmd).strip()
    except (subprocess.CalledProcessError, OSError) as exc:
        if exception:
            raise
        elif exit:
            print("error: unable to run `{}`: {}".format(' '.join(cmd), exc))
            print("Please make sure it's installed and in the path.")
            sys.exit(1)
        return None
def format_build_time(duration):
    """Return a nicer format for build time

    >>> format_build_time('300')
    '0:05:00'
    """
    return str(datetime.timedelta(seconds=int(duration)))
def default_build_triple(verbose):
    """Build triple as in LLVM"""
    # If the user already has a host build triple with an existing `rustc`
    # install, use their preference. This fixes most issues with Windows builds
    # being detected as GNU instead of MSVC.
    default_encoding = sys.getdefaultencoding()
    try:
        version = subprocess.check_output(["rustc", "--version", "--verbose"],
                                          stderr=subprocess.DEVNULL)
        version = version.decode(default_encoding)
        host = next(x for x in version.split('\n') if x.startswith("host: "))
        triple = host.split("host: ")[1]
        if verbose:
            print("detected default triple {} from pre-installed rustc".format(triple))
        return triple
    except Exception as e:
        if verbose:
            print("pre-installed rustc not detected: {}".format(e))
            print("falling back to auto-detect")

    required = sys.platform != 'win32'
    ostype = require(["uname", "-s"], exit=required)
    cputype = require(['uname', '-m'], exit=required)

    # If we do not have `uname`, assume Windows.
    if ostype is None or cputype is None:
        return 'x86_64-pc-windows-msvc'

    ostype = ostype.decode(default_encoding)
    cputype = cputype.decode(default_encoding)

    # The goal here is to come up with the same triple as LLVM would,
    # at least for the subset of platforms we're willing to target.
    ostype_mapper = {
        'Darwin': 'apple-darwin',
        'DragonFly': 'unknown-dragonfly',
        'FreeBSD': 'unknown-freebsd',
        'Haiku': 'unknown-haiku',
        'NetBSD': 'unknown-netbsd',
        'OpenBSD': 'unknown-openbsd'
    }

    # Consider the direct transformation first and then the special cases
    if ostype in ostype_mapper:
        ostype = ostype_mapper[ostype]
    elif ostype == 'Linux':
        os_from_sp = subprocess.check_output(
            ['uname', '-o']).strip().decode(default_encoding)
        if os_from_sp == 'Android':
            ostype = 'linux-android'
        else:
            ostype = 'unknown-linux-gnu'
    elif ostype == 'SunOS':
        ostype = 'pc-solaris'
        # On Solaris, uname -m will return a machine classification instead
        # of a cpu type, so uname -p is recommended instead.  However, the
        # output from that option is too generic for our purposes (it will
        # always emit 'i386' on x86/amd64 systems).  As such, isainfo -k
        # must be used instead.
        cputype = require(['isainfo', '-k']).decode(default_encoding)
        # sparc cpus have sun as a target vendor
        if 'sparc' in cputype:
            ostype = 'sun-solaris'
    elif ostype.startswith('MINGW'):
        # msys' `uname` does not print gcc configuration, but prints msys
        # configuration. so we cannot believe `uname -m`:
        # msys1 is always i686 and msys2 is always x86_64.
        # instead, msys defines $MSYSTEM which is MINGW32 on i686 and
        # MINGW64 on x86_64.
        ostype = 'pc-windows-gnu'
        cputype = 'i686'
        if os.environ.get('MSYSTEM') == 'MINGW64':
            cputype = 'x86_64'
    elif ostype.startswith('MSYS'):
        ostype = 'pc-windows-gnu'
    elif ostype.startswith('CYGWIN_NT'):
        cputype = 'i686'
        if ostype.endswith('WOW64'):
            cputype = 'x86_64'
        ostype = 'pc-windows-gnu'
    elif sys.platform == 'win32':
        # Some Windows platforms might have a `uname` command that returns a
        # non-standard string (e.g. gnuwin32 tools returns `windows32`). In
        # these cases, fall back to using sys.platform.
        return 'x86_64-pc-windows-msvc'
    else:
        err = "unknown OS type: {}".format(ostype)
        sys.exit(err)

    if cputype in ['powerpc', 'riscv'] and ostype == 'unknown-freebsd':
        cputype = subprocess.check_output(
            ['uname', '-p']).strip().decode(default_encoding)
    # NOTE(review): several mapper entries were elided in the scrape and are
    # reconstructed here — verify against upstream bootstrap.py.
    cputype_mapper = {
        'BePC': 'i686',
        'aarch64': 'aarch64',
        'amd64': 'x86_64',
        'arm64': 'aarch64',
        'i386': 'i686',
        'i486': 'i686',
        'i686': 'i686',
        'i786': 'i686',
        'm68k': 'm68k',
        'powerpc': 'powerpc',
        'powerpc64': 'powerpc64',
        'powerpc64le': 'powerpc64le',
        'ppc': 'powerpc',
        'ppc64': 'powerpc64',
        'ppc64le': 'powerpc64le',
        'riscv64': 'riscv64gc',
        's390x': 's390x',
        'x64': 'x86_64',
        'x86': 'i686',
        'x86-64': 'x86_64',
        'x86_64': 'x86_64'
    }

    # Consider the direct transformation first and then the special cases
    if cputype in cputype_mapper:
        cputype = cputype_mapper[cputype]
    elif cputype in {'xscale', 'arm'}:
        cputype = 'arm'
        if ostype == 'linux-android':
            ostype = 'linux-androideabi'
        elif ostype == 'unknown-freebsd':
            cputype = subprocess.check_output(
                ['uname', '-p']).strip().decode(default_encoding)
            ostype = 'unknown-freebsd'
    elif cputype == 'armv6l':
        cputype = 'arm'
        if ostype == 'linux-android':
            ostype = 'linux-androideabi'
        else:
            ostype += 'eabihf'
    elif cputype in {'armv7l', 'armv8l'}:
        cputype = 'armv7'
        if ostype == 'linux-android':
            ostype = 'linux-androideabi'
        else:
            ostype += 'eabihf'
    elif cputype == 'mips':
        if sys.byteorder == 'big':
            cputype = 'mips'
        elif sys.byteorder == 'little':
            cputype = 'mipsel'
        else:
            raise ValueError("unknown byteorder: {}".format(sys.byteorder))
    elif cputype == 'mips64':
        if sys.byteorder == 'big':
            cputype = 'mips64'
        elif sys.byteorder == 'little':
            cputype = 'mips64el'
        else:
            raise ValueError('unknown byteorder: {}'.format(sys.byteorder))
        # only the n64 ABI is supported, indicate it
        ostype += 'abi64'
    elif cputype == 'sparc' or cputype == 'sparcv9' or cputype == 'sparc64':
        pass
    else:
        err = "unknown cpu type: {}".format(cputype)
        sys.exit(err)

    return "{}-{}".format(cputype, ostype)
@contextlib.contextmanager
def output(filepath):
    """Context manager yielding a file that atomically replaces `filepath`.

    Writes go to `filepath + '.tmp'`; on exit the temp file is renamed into
    place (with a copy fallback when the target is in use on Windows).
    """
    tmp = filepath + '.tmp'
    with open(tmp, 'w') as f:
        yield f
    try:
        if os.path.exists(filepath):
            os.remove(filepath)  # PermissionError/OSError on Win32 if in use
    except OSError:
        shutil.copy2(tmp, filepath)
        os.remove(tmp)
        return
    os.rename(tmp, filepath)
class Stage0Toolchain:
    """Date and version of the stage0 compiler, parsed from src/stage0.json."""

    def __init__(self, stage0_payload):
        # e.g. {"date": "2022-05-20", "version": "beta"}
        self.date = stage0_payload["date"]
        self.version = stage0_payload["version"]

    def channel(self):
        """Return the rustup-style channel string, e.g. 'beta-2022-05-20'."""
        return self.version + "-" + self.date
class RustBuild(object):
    """Provide all the methods required to build Rust"""
    def __init__(self):
        # NOTE(review): some attribute initializers were elided in the scrape
        # and are reconstructed — verify against upstream bootstrap.py.
        self.checksums_sha256 = {}
        self.stage0_compiler = None
        self._download_url = ''
        self.build = ''
        self.build_dir = ''
        self.clean = False
        self.config_toml = ''
        self.rust_root = ''
        self.use_locked_deps = ''
        self.use_vendored_sources = ''
        self.verbose = False
        self.git_version = None
        self.nix_deps_dir = None
444 def download_toolchain(self
):
445 """Fetch the build system for Rust, written in Rust
447 This method will build a cache directory, then it will fetch the
448 tarball which has the stage0 compiler used to then bootstrap the Rust
451 Each downloaded tarball is extracted, after that, the script
452 will move all the content to the right place.
454 rustc_channel
= self
.stage0_compiler
.version
455 bin_root
= self
.bin_root()
457 key
= self
.stage0_compiler
.date
458 if self
.rustc().startswith(bin_root
) and \
459 (not os
.path
.exists(self
.rustc()) or
460 self
.program_out_of_date(self
.rustc_stamp(), key
)):
461 if os
.path
.exists(bin_root
):
462 shutil
.rmtree(bin_root
)
463 tarball_suffix
= '.tar.xz' if support_xz() else '.tar.gz'
464 filename
= "rust-std-{}-{}{}".format(
465 rustc_channel
, self
.build
, tarball_suffix
)
466 pattern
= "rust-std-{}".format(self
.build
)
467 self
._download
_component
_helper
(filename
, pattern
, tarball_suffix
)
468 filename
= "rustc-{}-{}{}".format(rustc_channel
, self
.build
,
470 self
._download
_component
_helper
(filename
, "rustc", tarball_suffix
)
471 filename
= "cargo-{}-{}{}".format(rustc_channel
, self
.build
,
473 self
._download
_component
_helper
(filename
, "cargo", tarball_suffix
)
474 self
.fix_bin_or_dylib("{}/bin/cargo".format(bin_root
))
476 self
.fix_bin_or_dylib("{}/bin/rustc".format(bin_root
))
477 self
.fix_bin_or_dylib("{}/bin/rustdoc".format(bin_root
))
478 lib_dir
= "{}/lib".format(bin_root
)
479 for lib
in os
.listdir(lib_dir
):
480 if lib
.endswith(".so"):
481 self
.fix_bin_or_dylib(os
.path
.join(lib_dir
, lib
))
482 with
output(self
.rustc_stamp()) as rust_stamp
:
483 rust_stamp
.write(key
)
485 def _download_component_helper(
486 self
, filename
, pattern
, tarball_suffix
,
488 key
= self
.stage0_compiler
.date
489 cache_dst
= os
.path
.join(self
.build_dir
, "cache")
490 rustc_cache
= os
.path
.join(cache_dst
, key
)
491 if not os
.path
.exists(rustc_cache
):
492 os
.makedirs(rustc_cache
)
494 base
= self
._download
_url
495 url
= "dist/{}".format(key
)
496 tarball
= os
.path
.join(rustc_cache
, filename
)
497 if not os
.path
.exists(tarball
):
500 "{}/{}".format(url
, filename
),
502 self
.checksums_sha256
,
503 verbose
=self
.verbose
,
505 unpack(tarball
, tarball_suffix
, self
.bin_root(), match
=pattern
, verbose
=self
.verbose
)
507 def fix_bin_or_dylib(self
, fname
):
508 """Modifies the interpreter section of 'fname' to fix the dynamic linker,
509 or the RPATH section, to fix the dynamic library search path
511 This method is only required on NixOS and uses the PatchELF utility to
512 change the interpreter/RPATH of ELF executables.
514 Please see https://nixos.org/patchelf.html for more information
516 default_encoding
= sys
.getdefaultencoding()
518 ostype
= subprocess
.check_output(
519 ['uname', '-s']).strip().decode(default_encoding
)
520 except subprocess
.CalledProcessError
:
522 except OSError as reason
:
523 if getattr(reason
, 'winerror', None) is not None:
527 if ostype
!= "Linux":
530 # If the user has asked binaries to be patched for Nix, then
531 # don't check for NixOS or `/lib`, just continue to the patching.
532 if self
.get_toml('patch-binaries-for-nix', 'build') != 'true':
533 # Use `/etc/os-release` instead of `/etc/NIXOS`.
534 # The latter one does not exist on NixOS when using tmpfs as root.
536 with
open("/etc/os-release", "r") as f
:
537 if not any(l
.strip() in ["ID=nixos", "ID='nixos'", 'ID="nixos"'] for l
in f
):
539 except FileNotFoundError
:
541 if os
.path
.exists("/lib"):
544 # At this point we're pretty sure the user is running NixOS or
546 nix_os_msg
= "info: you seem to be using Nix. Attempting to patch"
547 print(nix_os_msg
, fname
)
549 # Only build `.nix-deps` once.
550 nix_deps_dir
= self
.nix_deps_dir
552 # Run `nix-build` to "build" each dependency (which will likely reuse
553 # the existing `/nix/store` copy, or at most download a pre-built copy).
555 # Importantly, we create a gc-root called `.nix-deps` in the `build/`
556 # directory, but still reference the actual `/nix/store` path in the rpath
557 # as it makes it significantly more robust against changes to the location of
558 # the `.nix-deps` location.
560 # bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`).
561 # zlib: Needed as a system dependency of `libLLVM-*.so`.
562 # patchelf: Needed for patching ELF binaries (see doc comment above).
563 nix_deps_dir
= "{}/{}".format(self
.build_dir
, ".nix-deps")
565 with (import <nixpkgs> {});
567 name = "rust-stage0-dependencies";
576 subprocess
.check_output([
577 "nix-build", "-E", nix_expr
, "-o", nix_deps_dir
,
579 except subprocess
.CalledProcessError
as reason
:
580 print("warning: failed to call nix-build:", reason
)
582 self
.nix_deps_dir
= nix_deps_dir
584 patchelf
= "{}/bin/patchelf".format(nix_deps_dir
)
586 # Relative default, all binary and dynamic libraries we ship
587 # appear to have this (even when `../lib` is redundant).
589 os
.path
.join(os
.path
.realpath(nix_deps_dir
), "lib")
591 patchelf_args
= ["--set-rpath", ":".join(rpath_entries
)]
592 if not fname
.endswith(".so"):
593 # Finally, set the corret .interp for binaries
594 with
open("{}/nix-support/dynamic-linker".format(nix_deps_dir
)) as dynamic_linker
:
595 patchelf_args
+= ["--set-interpreter", dynamic_linker
.read().rstrip()]
598 subprocess
.check_output([patchelf
] + patchelf_args
+ [fname
])
599 except subprocess
.CalledProcessError
as reason
:
600 print("warning: failed to call patchelf:", reason
)
603 def rustc_stamp(self
):
604 """Return the path for .rustc-stamp at the given stage
607 >>> rb.build_dir = "build"
608 >>> rb.rustc_stamp() == os.path.join("build", "stage0", ".rustc-stamp")
611 return os
.path
.join(self
.bin_root(), '.rustc-stamp')
613 def program_out_of_date(self
, stamp_path
, key
):
614 """Check if the given program stamp is out of date"""
615 if not os
.path
.exists(stamp_path
) or self
.clean
:
617 with
open(stamp_path
, 'r') as stamp
:
618 return key
!= stamp
.read()
621 """Return the binary root directory for the given stage
624 >>> rb.build_dir = "build"
625 >>> rb.bin_root() == os.path.join("build", "stage0")
628 When the 'build' property is given should be a nested directory:
630 >>> rb.build = "devel"
631 >>> rb.bin_root() == os.path.join("build", "devel", "stage0")
635 return os
.path
.join(self
.build_dir
, self
.build
, subdir
)
637 def get_toml(self
, key
, section
=None):
638 """Returns the value of the given key in config.toml, otherwise returns None
641 >>> rb.config_toml = 'key1 = "value1"\\nkey2 = "value2"'
642 >>> rb.get_toml("key2")
645 If the key does not exist, the result is None:
647 >>> rb.get_toml("key3") is None
650 Optionally also matches the section the key appears in
652 >>> rb.config_toml = '[a]\\nkey = "value1"\\n[b]\\nkey = "value2"'
653 >>> rb.get_toml('key', 'a')
655 >>> rb.get_toml('key', 'b')
657 >>> rb.get_toml('key', 'c') is None
660 >>> rb.config_toml = 'key1 = true'
661 >>> rb.get_toml("key1")
666 for line
in self
.config_toml
.splitlines():
667 section_match
= re
.match(r
'^\s*\[(.*)\]\s*$', line
)
668 if section_match
is not None:
669 cur_section
= section_match
.group(1)
671 match
= re
.match(r
'^{}\s*=(.*)$'.format(key
), line
)
672 if match
is not None:
673 value
= match
.group(1)
674 if section
is None or section
== cur_section
:
675 return self
.get_string(value
) or value
.strip()
679 """Return config path for cargo"""
680 return self
.program_config('cargo')
683 """Return config path for rustc"""
684 return self
.program_config('rustc')
686 def program_config(self
, program
):
687 """Return config path for the given program at the given stage
690 >>> rb.config_toml = 'rustc = "rustc"\\n'
691 >>> rb.program_config('rustc')
693 >>> rb.config_toml = ''
694 >>> cargo_path = rb.program_config('cargo')
695 >>> cargo_path.rstrip(".exe") == os.path.join(rb.bin_root(),
699 config
= self
.get_toml(program
)
701 return os
.path
.expanduser(config
)
702 return os
.path
.join(self
.bin_root(), "bin", "{}{}".format(
703 program
, self
.exe_suffix()))
706 def get_string(line
):
707 """Return the value between double quotes
709 >>> RustBuild.get_string(' "devel" ')
711 >>> RustBuild.get_string(" 'devel' ")
713 >>> RustBuild.get_string('devel') is None
715 >>> RustBuild.get_string(' "devel ')
718 start
= line
.find('"')
720 end
= start
+ 1 + line
[start
+ 1:].find('"')
721 return line
[start
+ 1:end
]
722 start
= line
.find('\'')
724 end
= start
+ 1 + line
[start
+ 1:].find('\'')
725 return line
[start
+ 1:end
]
730 """Return a suffix for executables"""
731 if sys
.platform
== 'win32':
735 def bootstrap_binary(self
):
736 """Return the path of the bootstrap binary
739 >>> rb.build_dir = "build"
740 >>> rb.bootstrap_binary() == os.path.join("build", "bootstrap",
741 ... "debug", "bootstrap")
744 return os
.path
.join(self
.build_dir
, "bootstrap", "debug", "bootstrap")
746 def build_bootstrap(self
):
747 """Build bootstrap"""
748 print("Building rustbuild")
749 build_dir
= os
.path
.join(self
.build_dir
, "bootstrap")
750 if self
.clean
and os
.path
.exists(build_dir
):
751 shutil
.rmtree(build_dir
)
752 env
= os
.environ
.copy()
753 # `CARGO_BUILD_TARGET` breaks bootstrap build.
754 # See also: <https://github.com/rust-lang/rust/issues/70208>.
755 if "CARGO_BUILD_TARGET" in env
:
756 del env
["CARGO_BUILD_TARGET"]
757 env
["CARGO_TARGET_DIR"] = build_dir
758 env
["RUSTC"] = self
.rustc()
759 env
["LD_LIBRARY_PATH"] = os
.path
.join(self
.bin_root(), "lib") + \
760 (os
.pathsep
+ env
["LD_LIBRARY_PATH"]) \
761 if "LD_LIBRARY_PATH" in env
else ""
762 env
["DYLD_LIBRARY_PATH"] = os
.path
.join(self
.bin_root(), "lib") + \
763 (os
.pathsep
+ env
["DYLD_LIBRARY_PATH"]) \
764 if "DYLD_LIBRARY_PATH" in env
else ""
765 env
["LIBRARY_PATH"] = os
.path
.join(self
.bin_root(), "lib") + \
766 (os
.pathsep
+ env
["LIBRARY_PATH"]) \
767 if "LIBRARY_PATH" in env
else ""
769 # preserve existing RUSTFLAGS
770 env
.setdefault("RUSTFLAGS", "")
771 build_section
= "target.{}".format(self
.build
)
773 if self
.get_toml("crt-static", build_section
) == "true":
774 target_features
+= ["+crt-static"]
775 elif self
.get_toml("crt-static", build_section
) == "false":
776 target_features
+= ["-crt-static"]
778 env
["RUSTFLAGS"] += " -C target-feature=" + (",".join(target_features
))
779 target_linker
= self
.get_toml("linker", build_section
)
780 if target_linker
is not None:
781 env
["RUSTFLAGS"] += " -C linker=" + target_linker
782 env
["RUSTFLAGS"] += " -Wrust_2018_idioms -Wunused_lifetimes"
783 env
["RUSTFLAGS"] += " -Wsemicolon_in_expressions_from_macros"
784 if self
.get_toml("deny-warnings", "rust") != "false":
785 env
["RUSTFLAGS"] += " -Dwarnings"
787 env
["PATH"] = os
.path
.join(self
.bin_root(), "bin") + \
788 os
.pathsep
+ env
["PATH"]
789 if not os
.path
.isfile(self
.cargo()):
790 raise Exception("no cargo executable found at `{}`".format(
792 args
= [self
.cargo(), "build", "--manifest-path",
793 os
.path
.join(self
.rust_root
, "src/bootstrap/Cargo.toml")]
794 for _
in range(0, self
.verbose
):
795 args
.append("--verbose")
796 if self
.use_locked_deps
:
797 args
.append("--locked")
798 if self
.use_vendored_sources
:
799 args
.append("--frozen")
800 if self
.get_toml("metrics", "build"):
801 args
.append("--features")
802 args
.append("build-metrics")
803 run(args
, env
=env
, verbose
=self
.verbose
)
805 def build_triple(self
):
806 """Build triple as in LLVM
808 Note that `default_build_triple` is moderately expensive,
809 so use `self.build` where possible.
811 config
= self
.get_toml('build')
814 return default_build_triple(self
.verbose
)
816 def check_submodule(self
, module
):
817 checked_out
= subprocess
.Popen(["git", "rev-parse", "HEAD"],
818 cwd
=os
.path
.join(self
.rust_root
, module
),
819 stdout
=subprocess
.PIPE
)
822 def update_submodule(self
, module
, checked_out
, recorded_submodules
):
823 module_path
= os
.path
.join(self
.rust_root
, module
)
825 default_encoding
= sys
.getdefaultencoding()
826 checked_out
= checked_out
.communicate()[0].decode(default_encoding
).strip()
827 if recorded_submodules
[module
] == checked_out
:
830 print("Updating submodule", module
)
832 run(["git", "submodule", "-q", "sync", module
],
833 cwd
=self
.rust_root
, verbose
=self
.verbose
)
835 update_args
= ["git", "submodule", "update", "--init", "--recursive", "--depth=1"]
836 if self
.git_version
>= distutils
.version
.LooseVersion("2.11.0"):
837 update_args
.append("--progress")
838 update_args
.append(module
)
840 run(update_args
, cwd
=self
.rust_root
, verbose
=self
.verbose
, exception
=True)
842 print("Failed updating submodule. This is probably due to uncommitted local changes.")
843 print('Either stash the changes by running "git stash" within the submodule\'s')
844 print('directory, reset them by running "git reset --hard", or commit them.')
845 print("To reset all submodules' changes run", end
=" ")
846 print('"git submodule foreach --recursive git reset --hard".')
849 run(["git", "reset", "-q", "--hard"],
850 cwd
=module_path
, verbose
=self
.verbose
)
851 run(["git", "clean", "-qdfx"],
852 cwd
=module_path
, verbose
=self
.verbose
)
854 def update_submodules(self
):
855 """Update submodules"""
856 has_git
= os
.path
.exists(os
.path
.join(self
.rust_root
, ".git"))
857 # This just arbitrarily checks for cargo, but any workspace member in
858 # a submodule would work.
859 has_submodules
= os
.path
.exists(os
.path
.join(self
.rust_root
, "src/tools/cargo/Cargo.toml"))
860 if not has_git
and not has_submodules
:
861 print("This is not a git repository, and the requisite git submodules were not found.")
862 print("If you downloaded the source from https://github.com/rust-lang/rust/releases,")
863 print("those sources will not work. Instead, consider downloading from the source")
864 print("releases linked at")
865 print("https://forge.rust-lang.org/infra/other-installation-methods.html#source-code")
866 print("or clone the repository at https://github.com/rust-lang/rust/.")
868 if not has_git
or self
.get_toml('submodules') == "false":
871 default_encoding
= sys
.getdefaultencoding()
873 # check the existence and version of 'git' command
874 git_version_str
= require(['git', '--version']).split()[2].decode(default_encoding
)
875 self
.git_version
= distutils
.version
.LooseVersion(git_version_str
)
878 print('Updating only changed submodules')
879 default_encoding
= sys
.getdefaultencoding()
880 # Only update submodules that are needed to build bootstrap. These are needed because Cargo
881 # currently requires everything in a workspace to be "locally present" when starting a
882 # build, and will give a hard error if any Cargo.toml files are missing.
883 # FIXME: Is there a way to avoid cloning these eagerly? Bootstrap itself doesn't need to
884 # share a workspace with any tools - maybe it could be excluded from the workspace?
885 # That will still require cloning the submodules the second you check the standard
887 # FIXME: Is there a way to avoid hard-coding the submodules required?
888 # WARNING: keep this in sync with the submodules hard-coded in bootstrap/lib.rs
890 "src/tools/rust-installer",
897 # If build.vendor is set in config.toml, we must update rust-analyzer also.
898 # Otherwise, the bootstrap will fail (#96456).
899 if self
.use_vendored_sources
:
900 submodules
.append("src/tools/rust-analyzer")
901 filtered_submodules
= []
902 submodules_names
= []
903 for module
in submodules
:
904 check
= self
.check_submodule(module
)
905 filtered_submodules
.append((module
, check
))
906 submodules_names
.append(module
)
907 recorded
= subprocess
.Popen(["git", "ls-tree", "HEAD"] + submodules_names
,
908 cwd
=self
.rust_root
, stdout
=subprocess
.PIPE
)
909 recorded
= recorded
.communicate()[0].decode(default_encoding
).strip().splitlines()
911 recorded_submodules
= {}
912 for data
in recorded
:
913 # [mode, kind, hash, filename]
915 recorded_submodules
[data
[3]] = data
[2]
916 for module
in filtered_submodules
:
917 self
.update_submodule(module
[0], module
[1], recorded_submodules
)
918 print(" Submodules updated in %.2f seconds" % (time() - start_time
))
920 def set_dist_environment(self
, url
):
921 """Set download URL for normal environment"""
922 if 'RUSTUP_DIST_SERVER' in os
.environ
:
923 self
._download
_url
= os
.environ
['RUSTUP_DIST_SERVER']
925 self
._download
_url
= url
927 def check_vendored_status(self
):
928 """Check that vendoring is configured properly"""
929 vendor_dir
= os
.path
.join(self
.rust_root
, 'vendor')
930 if 'SUDO_USER' in os
.environ
and not self
.use_vendored_sources
:
932 self
.use_vendored_sources
= True
933 print('info: looks like you\'re trying to run this command as root')
934 print(' and so in order to preserve your $HOME this will now')
935 print(' use vendored sources by default.')
936 if not os
.path
.exists(vendor_dir
):
937 print('error: vendoring required, but vendor directory does not exist.')
938 print(' Run `cargo vendor` without sudo to initialize the '
940 raise Exception("{} not found".format(vendor_dir
))
942 if self
.use_vendored_sources
:
943 config
= ("[source.crates-io]\n"
944 "replace-with = 'vendored-sources'\n"
945 "registry = 'https://example.com'\n"
947 "[source.vendored-sources]\n"
948 "directory = '{}/vendor'\n"
949 .format(self
.rust_root
))
950 if not os
.path
.exists('.cargo'):
951 os
.makedirs('.cargo')
952 with
output('.cargo/config') as cargo_config
:
953 cargo_config
.write(config
)
955 print('info: using vendored source, but .cargo/config is already present.')
956 print(' Reusing the current configuration file. But you may want to '
957 'configure vendoring like this:')
960 if os
.path
.exists('.cargo'):
961 shutil
.rmtree('.cargo')
963 def ensure_vendored(self
):
964 """Ensure that the vendored sources are available if needed"""
965 vendor_dir
= os
.path
.join(self
.rust_root
, 'vendor')
966 # Note that this does not handle updating the vendored dependencies if
967 # the rust git repository is updated. Normal development usually does
968 # not use vendoring, so hopefully this isn't too much of a problem.
969 if self
.use_vendored_sources
and not os
.path
.exists(vendor_dir
):
973 "--sync=./src/tools/rust-analyzer/Cargo.toml",
974 "--sync=./compiler/rustc_codegen_cranelift/Cargo.toml",
975 ], verbose
=self
.verbose
, cwd
=self
.rust_root
)
def bootstrap(help_triggered):
    """Configure, fetch, build and run the initial bootstrap"""

    # If the user is asking for help, let them know that the whole download-and-build
    # process has to happen before anything is printed out.
    if help_triggered:
        print("info: Downloading and building bootstrap before processing --help")
        print("      command. See src/bootstrap/README.md for help with common")
        print("      commands.")

    parser = argparse.ArgumentParser(description='Build rust')
    parser.add_argument('--config')
    parser.add_argument('--build')
    parser.add_argument('--clean', action='store_true')
    parser.add_argument('-v', '--verbose', action='count', default=0)

    args = [a for a in sys.argv if a != '-h' and a != '--help']
    args, _ = parser.parse_known_args(args)

    # Configure initial bootstrap
    build = RustBuild()
    build.rust_root = os.path.abspath(os.path.join(__file__, '../../..'))
    build.verbose = args.verbose
    build.clean = args.clean

    # Read from `--config`, then `RUST_BOOTSTRAP_CONFIG`, then `./config.toml`,
    # then `config.toml` in the root directory.
    toml_path = args.config or os.getenv('RUST_BOOTSTRAP_CONFIG')
    using_default_path = toml_path is None
    if using_default_path:
        toml_path = 'config.toml'
        if not os.path.exists(toml_path):
            toml_path = os.path.join(build.rust_root, toml_path)

    # Give a hard error if `--config` or `RUST_BOOTSTRAP_CONFIG` are set to a missing path,
    # but not if `config.toml` hasn't been created.
    if not using_default_path or os.path.exists(toml_path):
        with open(toml_path) as config:
            build.config_toml = config.read()

    profile = build.get_toml('profile')
    if profile is not None:
        include_file = 'config.{}.toml'.format(profile)
        include_dir = os.path.join(build.rust_root, 'src', 'bootstrap', 'defaults')
        include_path = os.path.join(include_dir, include_file)
        # HACK: This works because `build.get_toml()` returns the first match it finds for a
        # specific key, so appending our defaults at the end allows the user to override them
        with open(include_path) as included_toml:
            build.config_toml += os.linesep + included_toml.read()

    config_verbose = build.get_toml('verbose', 'build')
    if config_verbose is not None:
        build.verbose = max(build.verbose, int(config_verbose))

    build.use_vendored_sources = build.get_toml('vendor', 'build') == 'true'

    build.use_locked_deps = build.get_toml('locked-deps', 'build') == 'true'

    build.check_vendored_status()

    build_dir = build.get_toml('build-dir', 'build') or 'build'
    build.build_dir = os.path.abspath(build_dir)

    with open(os.path.join(build.rust_root, "src", "stage0.json")) as f:
        data = json.load(f)
    build.checksums_sha256 = data["checksums_sha256"]
    build.stage0_compiler = Stage0Toolchain(data["compiler"])

    build.set_dist_environment(data["config"]["dist_server"])

    build.build = args.build or build.build_triple()

    # Acquire the lock before doing any build actions
    # The lock is released when `lock` is dropped
    if not os.path.exists(build.build_dir):
        os.makedirs(build.build_dir)
    lock = acquire_lock(build.build_dir)  # noqa: F841 — held for scope lifetime
    build.update_submodules()

    # Fetch/build the bootstrap
    build.download_toolchain()
    sys.stdout.flush()
    build.ensure_vendored()
    build.build_bootstrap()
    sys.stdout.flush()

    # Run the bootstrap
    args = [build.bootstrap_binary()]
    args.extend(sys.argv[1:])
    env = os.environ.copy()
    env["BOOTSTRAP_PARENT_ID"] = str(os.getpid())
    env["BOOTSTRAP_PYTHON"] = sys.executable
    env["RUSTC_BOOTSTRAP"] = '1'
    run(args, env=env, verbose=build.verbose, is_bootstrap=True)
def main():
    """Entry point for the bootstrap process"""
    start_time = time()

    # x.py help <cmd> ...
    if len(sys.argv) > 1 and sys.argv[1] == 'help':
        sys.argv = [sys.argv[0], '-h'] + sys.argv[2:]

    help_triggered = (
        '-h' in sys.argv) or ('--help' in sys.argv) or (len(sys.argv) == 1)
    try:
        bootstrap(help_triggered)
        if not help_triggered:
            print("Build completed successfully in {}".format(
                format_build_time(time() - start_time)))
    except (SystemExit, KeyboardInterrupt) as error:
        if hasattr(error, 'code') and isinstance(error.code, int):
            exit_code = error.code
        else:
            exit_code = 1
            print(error)
        if not help_triggered:
            print("Build completed unsuccessfully in {}".format(
                format_build_time(time() - start_time)))
        sys.exit(exit_code)
if __name__ == '__main__':
    main()