From 3b2f29766f50d6e3cd1126618408f62575f617e5 Mon Sep 17 00:00:00 2001
From: Ximin Luo
Date: Mon, 16 Oct 2017 16:39:26 +0200
Subject: [PATCH] New upstream version 1.21.0+dfsg1

---
 CONTRIBUTING.md | 15 +- COPYRIGHT | 41 +- LICENSE-MIT | 2 - README.md | 4 +- RELEASES.md | 373 +- configure | 10 +- src/Cargo.lock | 772 +- src/Cargo.toml | 21 + src/bootstrap/Cargo.toml | 7 +- src/bootstrap/README.md | 36 +- src/bootstrap/bin/main.rs | 7 +- src/bootstrap/bin/rustc.rs | 15 +- src/bootstrap/bootstrap.py | 507 +- src/bootstrap/bootstrap_test.py | 114 + src/bootstrap/builder.rs | 630 ++ src/bootstrap/cache.rs | 267 + src/bootstrap/cc.rs | 34 +- src/bootstrap/channel.rs | 9 +- src/bootstrap/check.rs | 1564 ++-- src/bootstrap/clean.rs | 2 +- src/bootstrap/compile.rs | 819 +- src/bootstrap/config.rs | 215 +- src/bootstrap/config.toml.example | 333 - src/bootstrap/dist.rs | 1985 +++-- src/bootstrap/doc.rs | 905 ++- src/bootstrap/flags.rs | 56 +- src/bootstrap/install.rs | 269 +- src/bootstrap/lib.rs | 635 +- src/bootstrap/metadata.rs | 32 +- src/bootstrap/mk/Makefile.in | 3 + src/bootstrap/native.rs | 648 +- src/bootstrap/sanity.rs | 52 +- src/bootstrap/step.rs | 1817 ----- src/bootstrap/tool.rs | 425 + src/build_helper/lib.rs | 21 +- src/ci/docker/arm-android/Dockerfile | 5 +- src/ci/docker/armhf-gnu/Dockerfile | 9 +- src/ci/docker/asmjs/Dockerfile | 5 - src/ci/docker/cross/Dockerfile | 12 +- src/ci/docker/cross/install-mips-musl.sh | 2 +- src/ci/docker/cross/install-mipsel-musl.sh | 2 +- src/ci/docker/cross/install-x86_64-redox.sh | 23 + src/ci/docker/disabled/aarch64-gnu/Dockerfile | 80 + src/ci/docker/disabled/aarch64-gnu/config | 3100 ++++++++ .../disabled/dist-aarch64-android/Dockerfile | 5 - .../disabled/dist-armv7-android/Dockerfile | 5 - .../disabled/dist-i686-android/Dockerfile | 5 - .../disabled/dist-x86_64-android/Dockerfile | 5 - .../disabled/dist-x86_64-redox/Dockerfile | 22 + src/ci/docker/disabled/wasm32-exp/Dockerfile | 7 - src/ci/docker/disabled/wasm32/Dockerfile | 7 +- src/ci/docker/dist-aarch64-linux/Dockerfile | 5 - src/ci/docker/dist-android/Dockerfile | 5 - src/ci/docker/dist-arm-linux/Dockerfile | 5 - src/ci/docker/dist-armhf-linux/Dockerfile | 5 - src/ci/docker/dist-armv7-linux/Dockerfile | 5 - src/ci/docker/dist-fuchsia/Dockerfile | 5 - .../docker/dist-i586-gnu-i686-musl/Dockerfile | 5 - src/ci/docker/dist-i686-freebsd/Dockerfile | 5 - .../dist-i686-freebsd/build-toolchain.sh | 4 +- src/ci/docker/dist-i686-linux/Dockerfile | 5 - .../docker/dist-i686-linux/build-openssl.sh | 2 +- src/ci/docker/dist-mips-linux/Dockerfile | 5 - src/ci/docker/dist-mips64-linux/Dockerfile | 5 - src/ci/docker/dist-mips64el-linux/Dockerfile | 5 - src/ci/docker/dist-mipsel-linux/Dockerfile | 5 - src/ci/docker/dist-powerpc-linux/Dockerfile | 5 - src/ci/docker/dist-powerpc64-linux/Dockerfile | 4 - .../docker/dist-powerpc64le-linux/Dockerfile | 5 - src/ci/docker/dist-s390x-linux/Dockerfile | 5 - src/ci/docker/dist-x86_64-freebsd/Dockerfile | 5 - .../dist-x86_64-freebsd/build-toolchain.sh | 4 +- src/ci/docker/dist-x86_64-linux/Dockerfile | 5 - .../docker/dist-x86_64-linux/build-openssl.sh | 2 +- src/ci/docker/dist-x86_64-musl/Dockerfile | 5 - src/ci/docker/dist-x86_64-netbsd/Dockerfile | 5 - .../build-netbsd-toolchain.sh | 2 +- src/ci/docker/i686-gnu-nopt/Dockerfile | 5 - src/ci/docker/i686-gnu/Dockerfile | 5 - src/ci/docker/run.sh | 21 +- src/ci/docker/scripts/android-ndk.sh | 2 +- src/ci/docker/scripts/android-sdk.sh | 2 +- src/ci/docker/scripts/crosstool-ng.sh | 2 +- 
src/ci/docker/scripts/emscripten-wasm.sh | 6 +- src/ci/docker/scripts/emscripten.sh | 4 +- src/ci/docker/scripts/make3.sh | 2 +- .../qemu-bare-bones-addentropy.c} | 0 .../rcS => scripts/qemu-bare-bones-rcS} | 0 src/ci/docker/scripts/sccache.sh | 6 +- src/ci/docker/x86_64-gnu-aux/Dockerfile | 5 - src/ci/docker/x86_64-gnu-debug/Dockerfile | 5 - src/ci/docker/x86_64-gnu-distcheck/Dockerfile | 5 - .../x86_64-gnu-full-bootstrap/Dockerfile | 5 - .../docker/x86_64-gnu-incremental/Dockerfile | 5 - src/ci/docker/x86_64-gnu-llvm-3.7/Dockerfile | 5 - src/ci/docker/x86_64-gnu-nopt/Dockerfile | 5 - src/ci/docker/x86_64-gnu/Dockerfile | 5 - src/ci/run.sh | 7 +- src/doc/book/ci/build.sh | 8 +- src/doc/book/first-edition/src/lifetimes.md | 4 +- src/doc/book/first-edition/src/structs.md | 2 +- src/doc/book/second-edition/Cargo.toml | 4 + .../book/second-edition/convert-quotes.sh} | 14 +- src/doc/book/second-edition/dictionary.txt | 7 + .../book/second-edition/nostarch/chapter01.md | 100 +- .../book/second-edition/nostarch/chapter02.md | 8 +- .../book/second-edition/nostarch/chapter03.md | 18 +- .../book/second-edition/nostarch/chapter04.md | 4 +- .../book/second-edition/nostarch/chapter05.md | 176 +- .../book/second-edition/nostarch/chapter06.md | 6 +- .../book/second-edition/nostarch/chapter07.md | 26 +- .../book/second-edition/nostarch/chapter08.md | 8 +- .../book/second-edition/nostarch/chapter09.md | 2 +- .../book/second-edition/nostarch/chapter10.md | 382 +- .../book/second-edition/nostarch/chapter11.md | 33 +- .../book/second-edition/nostarch/chapter12.md | 166 +- .../book/second-edition/nostarch/chapter13.md | 891 +-- .../book/second-edition/nostarch/chapter14.md | 1016 ++- .../book/second-edition/nostarch/chapter15.md | 354 +- .../book/second-edition/nostarch/chapter16.md | 288 +- .../book/second-edition/nostarch/chapter17.md | 374 +- .../book/second-edition/nostarch/chapter18.md | 174 +- .../book/second-edition/nostarch/chapter19.md | 1963 +++++ .../book/second-edition/nostarch/chapter20.md | 2277 ++++++ src/doc/book/second-edition/src/SUMMARY.md | 6 +- .../src/appendix-02-operators.md | 8 +- .../src/appendix-06-translation.md | 2 +- .../src/ch01-00-introduction.md | 6 +- .../src/ch01-01-installation.md | 16 +- .../second-edition/src/ch01-02-hello-world.md | 78 +- .../src/ch02-00-guessing-game-tutorial.md | 36 +- .../src/ch03-01-variables-and-mutability.md | 10 +- .../src/ch03-03-how-functions-work.md | 6 +- .../second-edition/src/ch03-04-comments.md | 2 +- .../src/ch03-05-control-flow.md | 9 +- .../src/ch04-01-what-is-ownership.md | 2 +- .../src/ch04-02-references-and-borrowing.md | 2 +- .../second-edition/src/ch05-00-structs.md | 4 +- .../src/ch05-01-defining-structs.md | 50 +- .../src/ch05-02-example-structs.md | 2 +- .../src/ch05-03-method-syntax.md | 35 +- .../src/ch06-01-defining-an-enum.md | 4 +- .../book/second-edition/src/ch06-02-match.md | 4 +- .../src/ch07-01-mod-and-the-filesystem.md | 4 +- ...ch07-02-controlling-visibility-with-pub.md | 4 +- .../src/ch07-03-importing-names-with-use.md | 4 +- .../src/ch08-00-common-collections.md | 2 +- .../second-edition/src/ch08-02-strings.md | 12 +- .../second-edition/src/ch08-03-hash-maps.md | 2 +- .../second-edition/src/ch10-00-generics.md | 58 +- .../book/second-edition/src/ch10-01-syntax.md | 122 +- .../book/second-edition/src/ch10-02-traits.md | 203 +- .../src/ch10-03-lifetime-syntax.md | 238 +- .../src/ch11-01-writing-tests.md | 43 +- .../src/ch11-02-running-tests.md | 6 +- .../src/ch11-03-test-organization.md | 6 +- 
.../src/ch12-00-an-io-project.md | 33 +- ...h12-01-accepting-command-line-arguments.md | 23 +- ...improving-error-handling-and-modularity.md | 34 +- ...2-04-testing-the-librarys-functionality.md | 32 +- ...2-05-working-with-environment-variables.md | 9 +- ...-06-writing-to-stderr-instead-of-stdout.md | 59 +- .../src/ch13-00-functional-features.md | 12 +- .../second-edition/src/ch13-01-closures.md | 168 +- .../second-edition/src/ch13-02-iterators.md | 101 +- .../src/ch13-03-improving-our-io-project.md | 114 +- .../second-edition/src/ch13-04-performance.md | 42 +- .../src/ch14-00-more-about-cargo.md | 28 +- .../src/ch14-01-release-profiles.md | 116 +- .../src/ch14-02-publishing-to-crates-io.md | 513 +- .../src/ch14-03-cargo-workspaces.md | 227 +- .../src/ch14-04-installing-binaries.md | 29 +- .../src/ch14-05-extending-cargo.md | 16 +- .../src/ch15-00-smart-pointers.md | 20 +- .../book/second-edition/src/ch15-01-box.md | 47 +- .../book/second-edition/src/ch15-02-deref.md | 42 +- .../book/second-edition/src/ch15-03-drop.md | 56 +- src/doc/book/second-edition/src/ch15-04-rc.md | 42 +- .../src/ch15-05-interior-mutability.md | 62 +- .../src/ch15-06-reference-cycles.md | 100 +- .../second-edition/src/ch16-00-concurrency.md | 18 +- .../second-edition/src/ch16-01-threads.md | 54 +- .../src/ch16-02-message-passing.md | 66 +- .../src/ch16-03-shared-state.md | 114 +- ...04-extensible-concurrency-sync-and-send.md | 36 +- .../book/second-edition/src/ch17-00-oop.md | 2 +- .../second-edition/src/ch17-01-what-is-oo.md | 62 +- .../src/ch17-02-trait-objects.md | 112 +- .../src/ch17-03-oo-design-patterns.md | 210 +- .../second-edition/src/ch18-00-patterns.md | 4 +- .../ch18-01-all-the-places-for-patterns.md | 52 +- .../src/ch18-02-refutability.md | 34 +- .../src/ch18-03-pattern-syntax.md | 84 +- .../src/ch19-00-advanced-features.md | 16 +- .../second-edition/src/ch19-01-unsafe-rust.md | 174 +- .../src/ch19-02-advanced-lifetimes.md | 86 +- .../src/ch19-03-advanced-traits.md | 155 +- .../src/ch19-04-advanced-types.md | 114 +- ...ch19-05-advanced-functions-and-closures.md | 42 +- .../src/ch20-00-final-project-a-web-server.md | 16 +- .../src/ch20-01-single-threaded.md | 174 +- .../src/ch20-02-slow-requests.md | 42 +- .../src/ch20-03-designing-the-interface.md | 70 +- .../src/ch20-04-storing-threads.md | 78 +- .../ch20-05-sending-requests-via-channels.md | 94 +- .../ch20-06-graceful-shutdown-and-cleanup.md | 92 +- .../book/second-edition/src/img/trpl14-03.png | Bin 0 -> 65437 bytes .../book/second-edition/src/img/trpl14-05.png | Bin 0 -> 60311 bytes .../book/second-edition/src/img/trpl14-07.png | Bin 0 -> 43085 bytes .../book/second-edition/src/img/trpl14-10.png | Bin 0 -> 68900 bytes .../src/img/{hello.png => trpl20-01.png} | Bin src/doc/book/second-edition/theme/index.hbs | 19 + .../tools/src/bin/convert_quotes.rs | 84 + src/doc/index.md | 2 + src/doc/nomicon/src/README.md | 35 +- src/doc/nomicon/src/SUMMARY.md | 1 + src/doc/nomicon/src/meet-safe-and-unsafe.md | 130 +- src/doc/nomicon/src/other-reprs.md | 2 +- src/doc/nomicon/src/safe-unsafe-meaning.md | 132 +- src/doc/nomicon/src/vec-final.md | 2 +- src/doc/nomicon/src/what-unsafe-does.md | 58 + src/doc/nomicon/src/working-with-unsafe.md | 28 +- src/doc/reference/README.md | 31 + src/doc/reference/src/SUMMARY.md | 3 + src/doc/reference/src/attributes.md | 21 + src/doc/reference/src/comments.md | 115 +- .../reference/src/crates-and-source-files.md | 35 +- src/doc/reference/src/expressions.md | 34 +- src/doc/reference/src/glossory.md | 87 + 
src/doc/reference/src/identifiers.md | 7 +- src/doc/reference/src/items.md | 632 +- src/doc/reference/src/keywords.md | 84 + src/doc/reference/src/types.md | 79 +- .../reference/src/visibility-and-privacy.md | 9 + src/doc/rustdoc/src/SUMMARY.md | 5 +- src/doc/rustdoc/src/command-line-arguments.md | 28 +- src/doc/rustdoc/src/documentation-tests.md | 238 +- src/doc/rustdoc/src/in-source-directives.md | 3 - src/doc/rustdoc/src/passes.md | 85 +- src/doc/rustdoc/src/plugins.md | 3 - src/doc/rustdoc/src/the-doc-attribute.md | 178 + .../src/language-features/doc-cfg.md | 42 + .../src/language-features/lang-items.md | 8 + .../rvalue-static-promotion.md | 23 - .../library-features/compiler-builtins-lib.md | 35 + .../src/library-features/compiler-fences.md | 106 - .../src/library-features/iterator-for-each.md | 17 - .../src/library-features/splice.md | 11 +- .../src/library-features/string-retain.md | 23 + src/etc/htmldocck.py | 33 +- src/etc/installer/exe/rust.iss | 3 + src/etc/installer/msi/rust.wxs | 10 + src/etc/installer/pkg/Distribution.xml | 69 +- src/etc/natvis/intrinsic.natvis | 24 + src/etc/natvis/liballoc.natvis | 4 +- src/etc/platform-intrinsics/generator.py | 10 +- src/etc/platform-intrinsics/powerpc.json | 161 + src/jemalloc/.appveyor.yml | 42 + src/jemalloc/.travis.yml | 95 + src/jemalloc/ChangeLog | 174 +- src/jemalloc/INSTALL | 42 +- src/jemalloc/Makefile.in | 81 +- src/jemalloc/README | 2 +- src/jemalloc/build-aux/config.guess | 174 +- src/jemalloc/build-aux/config.sub | 76 +- src/jemalloc/configure | 2720 +++++-- src/jemalloc/configure.ac | 553 +- src/jemalloc/doc/html.xsl.in | 1 + src/jemalloc/doc/jemalloc.xml.in | 352 +- src/jemalloc/doc/stylesheet.xsl | 7 +- .../include/jemalloc/internal/arena.h | 502 +- src/jemalloc/include/jemalloc/internal/base.h | 11 +- .../include/jemalloc/internal/bitmap.h | 6 +- .../include/jemalloc/internal/chunk.h | 38 +- .../include/jemalloc/internal/chunk_dss.h | 10 +- src/jemalloc/include/jemalloc/internal/ctl.h | 25 +- .../include/jemalloc/internal/extent.h | 48 +- src/jemalloc/include/jemalloc/internal/hash.h | 4 +- src/jemalloc/include/jemalloc/internal/huge.h | 19 +- .../jemalloc/internal/jemalloc_internal.h.in | 314 +- .../internal/jemalloc_internal_decls.h | 15 + .../internal/jemalloc_internal_defs.h.in | 65 +- src/jemalloc/include/jemalloc/internal/mb.h | 12 +- .../include/jemalloc/internal/mutex.h | 60 +- .../include/jemalloc/internal/nstime.h | 8 +- .../include/jemalloc/internal/pages.h | 7 +- src/jemalloc/include/jemalloc/internal/ph.h | 345 + .../jemalloc/internal/private_symbols.txt | 182 +- src/jemalloc/include/jemalloc/internal/prng.h | 150 +- src/jemalloc/include/jemalloc/internal/prof.h | 80 +- .../include/jemalloc/internal/rtree.h | 160 +- .../include/jemalloc/internal/size_classes.sh | 46 +- src/jemalloc/include/jemalloc/internal/spin.h | 51 + .../include/jemalloc/internal/stats.h | 16 +- .../include/jemalloc/internal/tcache.h | 46 +- src/jemalloc/include/jemalloc/internal/tsd.h | 165 +- src/jemalloc/include/jemalloc/internal/util.h | 34 +- .../include/jemalloc/internal/valgrind.h | 40 +- .../include/jemalloc/internal/witness.h | 304 + .../include/jemalloc/jemalloc_macros.h.in | 8 +- src/jemalloc/jemalloc.pc.in | 2 +- src/jemalloc/msvc/ReadMe.txt | 2 +- .../projects/vc2015/jemalloc/jemalloc.vcxproj | 28 +- .../vc2015/jemalloc/jemalloc.vcxproj.filters | 19 +- .../vc2015/test_threads/test_threads.cpp | 6 +- src/jemalloc/scripts/gen_travis.py | 85 + src/jemalloc/src/arena.c | 1733 +++-- src/jemalloc/src/base.c | 73 +- 
src/jemalloc/src/bitmap.c | 11 +- src/jemalloc/src/chunk.c | 395 +- src/jemalloc/src/chunk_dss.c | 223 +- src/jemalloc/src/chunk_mmap.c | 10 +- src/jemalloc/src/ckh.c | 29 +- src/jemalloc/src/ctl.c | 440 +- src/jemalloc/src/extent.c | 99 +- src/jemalloc/src/huge.c | 242 +- src/jemalloc/src/jemalloc.c | 1057 ++- src/jemalloc/src/mutex.c | 23 +- src/jemalloc/src/nstime.c | 100 +- src/jemalloc/src/pages.c | 180 +- src/jemalloc/src/prof.c | 639 +- src/jemalloc/src/quarantine.c | 46 +- src/jemalloc/src/rtree.c | 9 +- src/jemalloc/src/spin.c | 2 + src/jemalloc/src/stats.c | 1275 ++- src/jemalloc/src/tcache.c | 241 +- src/jemalloc/src/tsd.c | 22 +- src/jemalloc/src/util.c | 20 +- src/jemalloc/src/witness.c | 136 + src/jemalloc/src/zone.c | 45 +- .../test/include/test/jemalloc_test.h.in | 78 +- src/jemalloc/test/include/test/mtx.h | 2 + src/jemalloc/test/include/test/test.h | 4 + src/jemalloc/test/integration/MALLOCX_ARENA.c | 4 +- src/jemalloc/test/integration/aligned_alloc.c | 20 +- src/jemalloc/test/integration/allocated.c | 17 +- src/jemalloc/test/integration/chunk.c | 102 +- src/jemalloc/test/integration/chunk.sh | 5 + src/jemalloc/test/integration/mallocx.c | 82 +- src/jemalloc/test/integration/mallocx.sh | 5 + src/jemalloc/test/integration/overflow.c | 8 +- .../test/integration/posix_memalign.c | 20 +- src/jemalloc/test/integration/rallocx.c | 4 +- src/jemalloc/test/integration/sdallocx.c | 4 +- src/jemalloc/test/integration/thread_arena.c | 10 +- .../test/integration/thread_tcache_enabled.c | 39 +- src/jemalloc/test/integration/xallocx.c | 8 +- src/jemalloc/test/integration/xallocx.sh | 5 + src/jemalloc/test/src/mtx.c | 7 + src/jemalloc/test/src/test.c | 56 +- src/jemalloc/test/src/timer.c | 5 +- src/jemalloc/test/stress/microbench.c | 3 +- src/jemalloc/test/test.sh.in | 29 +- src/jemalloc/test/unit/a0.c | 19 + src/jemalloc/test/unit/arena_reset.c | 155 + src/jemalloc/test/unit/arena_reset.sh | 5 + src/jemalloc/test/unit/bitmap.c | 4 +- src/jemalloc/test/unit/ckh.c | 8 +- src/jemalloc/test/unit/decay.c | 84 +- src/jemalloc/test/unit/decay.sh | 3 + src/jemalloc/test/unit/extent_quantize.c | 98 + src/jemalloc/test/unit/fork.c | 64 + src/jemalloc/test/unit/junk.c | 25 +- src/jemalloc/test/unit/junk.sh | 5 + src/jemalloc/test/unit/junk_alloc.c | 2 - src/jemalloc/test/unit/junk_alloc.sh | 5 + src/jemalloc/test/unit/junk_free.c | 2 - src/jemalloc/test/unit/junk_free.sh | 5 + src/jemalloc/test/unit/lg_chunk.c | 7 - src/jemalloc/test/unit/lg_chunk.sh | 6 + src/jemalloc/test/unit/mallctl.c | 213 +- src/jemalloc/test/unit/math.c | 4 + src/jemalloc/test/unit/nstime.c | 9 +- src/jemalloc/test/unit/pack.c | 198 + src/jemalloc/test/unit/pack.sh | 5 + src/jemalloc/test/unit/pages.c | 27 + src/jemalloc/test/unit/ph.c | 290 + src/jemalloc/test/unit/prng.c | 219 +- src/jemalloc/test/unit/prof_accum.c | 10 +- src/jemalloc/test/unit/prof_accum.sh | 5 + src/jemalloc/test/unit/prof_active.c | 10 +- src/jemalloc/test/unit/prof_active.sh | 5 + src/jemalloc/test/unit/prof_gdump.c | 17 +- src/jemalloc/test/unit/prof_gdump.sh | 6 + src/jemalloc/test/unit/prof_idump.c | 11 +- src/jemalloc/test/unit/prof_idump.sh | 7 + src/jemalloc/test/unit/prof_reset.c | 21 +- src/jemalloc/test/unit/prof_reset.sh | 5 + src/jemalloc/test/unit/prof_tctx.sh | 5 + src/jemalloc/test/unit/prof_thread_name.c | 26 +- src/jemalloc/test/unit/prof_thread_name.sh | 5 + src/jemalloc/test/unit/quarantine.c | 8 +- src/jemalloc/test/unit/quarantine.sh | 8 + src/jemalloc/test/unit/run_quantize.c | 22 +- src/jemalloc/test/unit/size_classes.c | 102 +- 
src/jemalloc/test/unit/stats.c | 233 +- src/jemalloc/test/unit/stats_print.c | 1005 +++ src/jemalloc/test/unit/tsd.c | 13 +- src/jemalloc/test/unit/util.c | 32 +- src/jemalloc/test/unit/witness.c | 302 + src/jemalloc/test/unit/zero.c | 21 +- src/jemalloc/test/unit/zero.sh | 5 + src/liballoc/allocator.rs | 78 +- src/liballoc/arc.rs | 334 +- src/liballoc/binary_heap.rs | 6 +- src/liballoc/boxed.rs | 35 +- src/liballoc/btree/map.rs | 14 +- src/liballoc/btree/node.rs | 44 +- src/liballoc/fmt.rs | 101 +- src/liballoc/heap.rs | 21 +- src/liballoc/lib.rs | 12 +- src/liballoc/linked_list.rs | 10 +- src/liballoc/raw_vec.rs | 227 +- src/liballoc/rc.rs | 379 +- src/liballoc/slice.rs | 78 +- src/liballoc/str.rs | 45 +- src/liballoc/string.rs | 175 +- src/liballoc/tests/lib.rs | 7 +- src/liballoc/tests/string.rs | 20 + src/liballoc/tests/vec.rs | 167 + src/liballoc/vec.rs | 144 +- src/liballoc/vec_deque.rs | 12 +- src/liballoc_jemalloc/Cargo.toml | 6 +- src/liballoc_jemalloc/build.rs | 24 +- src/liballoc_jemalloc/lib.rs | 23 +- src/liballoc_system/Cargo.toml | 4 +- src/liballoc_system/lib.rs | 151 +- src/liballoc_system/old.rs | 268 - src/libarena/lib.rs | 4 - src/libbacktrace/config.sub | 47 +- src/libcollections/lib.rs | 2 - src/libcompiler_builtins/src/float/add.rs | 6 +- src/libcompiler_builtins/src/int/udiv.rs | 13 +- src/libcompiler_builtins/src/x86_64.rs | 6 +- src/libcore/array.rs | 3 +- src/libcore/cell.rs | 87 +- src/libcore/clone.rs | 6 +- src/libcore/cmp.rs | 17 +- src/libcore/default.rs | 2 +- src/libcore/fmt/builders.rs | 32 +- src/libcore/fmt/mod.rs | 29 +- src/libcore/hash/mod.rs | 9 +- src/libcore/hash/sip.rs | 2 +- src/libcore/intrinsics.rs | 19 +- src/libcore/iter/iterator.rs | 21 +- src/libcore/iter/mod.rs | 12 +- src/libcore/iter/range.rs | 13 +- src/libcore/iter/traits.rs | 37 +- src/libcore/lib.rs | 4 - src/libcore/mem.rs | 77 +- src/libcore/nonzero.rs | 91 +- src/libcore/num/dec2flt/algorithm.rs | 2 +- src/libcore/num/dec2flt/rawfp.rs | 6 +- src/libcore/num/diy_float.rs | 2 +- src/libcore/num/f32.rs | 3 +- src/libcore/num/f64.rs | 3 - src/libcore/num/flt2dec/mod.rs | 6 +- src/libcore/num/mod.rs | 265 +- src/libcore/num/wrapping.rs | 52 +- src/libcore/ops/arith.rs | 308 +- src/libcore/ops/bit.rs | 278 +- src/libcore/ops/deref.rs | 100 +- src/libcore/ops/drop.rs | 62 +- src/libcore/ops/function.rs | 130 +- src/libcore/ops/index.rs | 49 +- src/libcore/ops/mod.rs | 17 +- src/libcore/ops/place.rs | 2 +- src/libcore/ops/range.rs | 206 +- src/libcore/ops/try.rs | 4 +- src/libcore/ops/unsize.rs | 2 +- src/libcore/option.rs | 2 +- src/libcore/panicking.rs | 25 +- src/libcore/ptr.rs | 79 +- src/libcore/result.rs | 102 +- src/libcore/slice/mod.rs | 17 +- src/libcore/str/mod.rs | 8 +- src/libcore/str/pattern.rs | 34 +- src/libcore/sync/atomic.rs | 61 +- src/libcore/tests/cell.rs | 17 + src/libcore/tests/iter.rs | 2 +- src/libcore/tests/lib.rs | 6 +- src/libcore/tests/nonzero.rs | 6 +- src/libcore/tests/num/mod.rs | 226 + src/libcore/tests/option.rs | 2 +- src/libcore/tests/ptr.rs | 2 +- src/libcore/tests/slice.rs | 12 +- src/libcore/tuple.rs | 1 + src/libfmt_macros/lib.rs | 5 +- src/libgetopts/lib.rs | 29 +- src/libgraphviz/lib.rs | 23 +- src/liblibc/.travis.yml | 112 +- src/liblibc/Cargo.lock | 254 +- src/liblibc/Cargo.toml | 8 +- src/liblibc/README.md | 5 +- src/liblibc/appveyor.yml | 8 +- src/liblibc/ci/README.md | 2 +- src/liblibc/ci/android-install-ndk.sh | 9 +- src/liblibc/ci/android-install-sdk.sh | 2 +- .../docker/aarch64-linux-android/Dockerfile | 13 + 
.../aarch64-unknown-linux-gnu/Dockerfile | 6 +- .../aarch64-unknown-linux-musl/Dockerfile | 24 + .../docker/arm-linux-androideabi/Dockerfile | 13 + .../arm-unknown-linux-gnueabihf/Dockerfile | 6 +- .../asmjs-unknown-emscripten/Dockerfile | 20 + .../ci/docker/i686-linux-android/Dockerfile | 13 + .../docker/i686-unknown-linux-gnu/Dockerfile | 2 +- .../docker/i686-unknown-linux-musl/Dockerfile | 11 +- .../docker/mips-unknown-linux-gnu/Dockerfile | 6 +- .../docker/mips-unknown-linux-musl/Dockerfile | 10 +- .../mips64-unknown-linux-gnuabi64/Dockerfile | 6 +- .../mipsel-unknown-linux-musl/Dockerfile | 10 +- .../powerpc-unknown-linux-gnu/Dockerfile | 6 +- .../powerpc64-unknown-linux-gnu/Dockerfile | 6 +- .../docker/s390x-unknown-linux-gnu/Dockerfile | 12 + .../wasm32-unknown-emscripten/Dockerfile | 21 + .../wasm32-unknown-emscripten/node-wrapper.sh | 11 + .../ci/docker/x86_64-linux-android/Dockerfile | 2 +- .../docker/x86_64-rumprun-netbsd/Dockerfile | 6 +- .../docker/x86_64-rumprun-netbsd/runtest.rs | 54 + .../docker/x86_64-unknown-freebsd/Dockerfile | 4 +- .../x86_64-unknown-linux-gnu/Dockerfile | 2 +- .../x86_64-unknown-linux-musl/Dockerfile | 11 +- .../docker/x86_64-unknown-openbsd/Dockerfile | 8 - .../ci/emscripten-entry.sh} | 10 +- src/liblibc/ci/emscripten.sh | 54 + src/liblibc/ci/run-docker.sh | 2 +- src/liblibc/ci/run.sh | 127 +- src/liblibc/ci/runtest-android.rs | 41 + src/liblibc/libc-test/Cargo.toml | 12 +- src/liblibc/libc-test/build-generated.rs | 16 - src/liblibc/libc-test/build.rs | 150 +- .../libc-test/generate-files/Cargo.toml | 16 - .../libc-test/run-generated-Cargo.toml | 19 - src/liblibc/libc-test/src/main-generated.rs | 9 - src/liblibc/libc-test/test/linux_fcntl.rs | 7 + src/liblibc/libc-test/{src => test}/main.rs | 2 +- src/liblibc/src/lib.rs | 23 +- src/liblibc/src/macros.rs | 37 - src/liblibc/src/redox.rs | 57 - src/liblibc/src/redox/mod.rs | 104 + src/liblibc/src/redox/net.rs | 110 + src/liblibc/src/unix/bsd/apple/b32.rs | 34 + src/liblibc/src/unix/bsd/apple/b64.rs | 39 + src/liblibc/src/unix/bsd/apple/mod.rs | 496 +- .../src/unix/bsd/freebsdlike/dragonfly/mod.rs | 319 +- .../unix/bsd/freebsdlike/freebsd/aarch64.rs | 2 + .../src/unix/bsd/freebsdlike/freebsd/mod.rs | 293 +- .../unix/bsd/freebsdlike/freebsd/x86_64.rs | 2 + src/liblibc/src/unix/bsd/freebsdlike/mod.rs | 194 +- src/liblibc/src/unix/bsd/mod.rs | 121 +- src/liblibc/src/unix/bsd/netbsdlike/mod.rs | 72 +- .../src/unix/bsd/netbsdlike/netbsd/mod.rs | 292 +- .../unix/bsd/netbsdlike/openbsdlike/mod.rs | 263 +- .../bsd/netbsdlike/openbsdlike/openbsd.rs | 2 + src/liblibc/src/unix/haiku/b32.rs | 1 + src/liblibc/src/unix/haiku/b64.rs | 1 + src/liblibc/src/unix/haiku/mod.rs | 558 +- src/liblibc/src/unix/mod.rs | 86 +- src/liblibc/src/unix/newlib/arm/mod.rs | 5 + src/liblibc/src/unix/newlib/mod.rs | 667 ++ .../src/unix/notbsd/android/b32/arm.rs | 6 + .../src/unix/notbsd/android/b32/mod.rs | 33 +- .../src/unix/notbsd/android/b32/x86.rs | 409 + .../src/unix/notbsd/android/b64/aarch64.rs | 7 + .../src/unix/notbsd/android/b64/mod.rs | 30 + .../src/unix/notbsd/android/b64/x86_64.rs | 370 + src/liblibc/src/unix/notbsd/android/mod.rs | 314 +- src/liblibc/src/unix/notbsd/emscripten.rs | 1667 ++++ .../src/unix/notbsd/linux/mips/mips32.rs | 44 +- .../src/unix/notbsd/linux/mips/mips64.rs | 39 +- src/liblibc/src/unix/notbsd/linux/mips/mod.rs | 136 +- src/liblibc/src/unix/notbsd/linux/mod.rs | 472 +- .../src/unix/notbsd/linux/musl/b32/arm.rs | 46 + .../src/unix/notbsd/linux/musl/b32/asmjs.rs | 336 - 
.../src/unix/notbsd/linux/musl/b32/mips.rs | 54 +- .../src/unix/notbsd/linux/musl/b32/mod.rs | 27 +- .../src/unix/notbsd/linux/musl/b32/x86.rs | 444 +- .../src/unix/notbsd/linux/musl/b64/aarch64.rs | 78 + .../src/unix/notbsd/linux/musl/b64/mod.rs | 82 +- .../unix/notbsd/linux/musl/b64/powerpc64.rs | 77 + .../src/unix/notbsd/linux/musl/b64/x86_64.rs | 433 +- src/liblibc/src/unix/notbsd/linux/musl/mod.rs | 82 +- .../src/unix/notbsd/linux/other/b32/arm.rs | 48 + .../src/unix/notbsd/linux/other/b32/mod.rs | 15 +- .../unix/notbsd/linux/other/b32/powerpc.rs | 45 +- .../src/unix/notbsd/linux/other/b32/x86.rs | 448 +- .../unix/notbsd/linux/other/b64/aarch64.rs | 49 +- .../src/unix/notbsd/linux/other/b64/mod.rs | 3 + .../unix/notbsd/linux/other/b64/powerpc64.rs | 47 +- .../unix/notbsd/linux/other/b64/sparc64.rs | 51 + .../src/unix/notbsd/linux/other/b64/x86_64.rs | 424 +- .../src/unix/notbsd/linux/other/mod.rs | 131 +- src/liblibc/src/unix/notbsd/linux/s390x.rs | 243 +- src/liblibc/src/unix/notbsd/mod.rs | 254 +- src/liblibc/src/unix/solaris/mod.rs | 324 +- src/liblibc/src/unix/uclibc/mod.rs | 187 +- src/liblibc/src/unix/uclibc/x86_64/l4re.rs | 46 + src/liblibc/src/unix/uclibc/x86_64/mod.rs | 247 +- src/liblibc/src/windows.rs | 15 +- src/libpanic_abort/lib.rs | 2 - src/libpanic_unwind/gcc.rs | 2 +- src/libpanic_unwind/lib.rs | 7 +- src/libproc_macro/lib.rs | 93 +- src/libprofiler_builtins/lib.rs | 6 +- src/librand/distributions/gamma.rs | 6 +- src/librand/distributions/mod.rs | 2 +- src/librand/distributions/normal.rs | 4 +- src/librand/distributions/range.rs | 4 +- src/librand/isaac.rs | 4 +- src/librand/lib.rs | 12 +- src/librand/reseeding.rs | 8 +- src/librustc/build.rs | 15 + src/librustc/dep_graph/dep_node.rs | 24 +- src/librustc/dep_graph/dep_tracking_map.rs | 40 +- src/librustc/dep_graph/edges.rs | 14 +- src/librustc/dep_graph/graph.rs | 15 + src/librustc/diagnostics.rs | 23 +- src/librustc/hir/check_attr.rs | 15 +- src/librustc/hir/def_id.rs | 4 + src/librustc/hir/intravisit.rs | 27 +- src/librustc/hir/lowering.rs | 344 +- src/librustc/hir/map/blocks.rs | 12 + src/librustc/hir/map/collector.rs | 203 +- src/librustc/hir/map/definitions.rs | 120 +- src/librustc/hir/map/mod.rs | 369 +- src/librustc/hir/mod.rs | 173 +- src/librustc/hir/pat_util.rs | 40 +- src/librustc/hir/print.rs | 102 +- src/librustc/ich/hcx.rs | 14 +- src/librustc/ich/impls_const_math.rs | 6 +- src/librustc/ich/impls_hir.rs | 34 +- src/librustc/ich/impls_mir.rs | 59 +- src/librustc/ich/impls_ty.rs | 96 +- src/librustc/ich/mod.rs | 2 +- src/librustc/infer/at.rs | 2 +- .../error_reporting/anon_anon_conflict.rs | 302 + src/librustc/infer/error_reporting/mod.rs | 65 +- .../error_reporting/named_anon_conflict.rs | 170 +- .../infer/error_reporting/need_type_info.rs | 10 +- src/librustc/infer/error_reporting/note.rs | 18 +- src/librustc/infer/error_reporting/util.rs | 196 + src/librustc/infer/freshen.rs | 107 +- src/librustc/infer/higher_ranked/mod.rs | 2 +- src/librustc/infer/lattice.rs | 2 +- src/librustc/infer/mod.rs | 26 +- src/librustc/infer/region_inference/mod.rs | 2 +- src/librustc/infer/resolve.rs | 51 +- src/librustc/lib.rs | 12 +- src/librustc/lint/builtin.rs | 7 + src/librustc/lint/context.rs | 777 +- src/librustc/lint/levels.rs | 370 + src/librustc/lint/mod.rs | 236 +- src/librustc/lint/table.rs | 71 - src/librustc/macros.rs | 16 +- src/librustc/middle/const_val.rs | 10 +- src/librustc/middle/cstore.rs | 46 +- src/librustc/middle/dataflow.rs | 17 +- src/librustc/middle/dead.rs | 97 +- src/librustc/middle/effect.rs | 14 
+- src/librustc/middle/expr_use_visitor.rs | 53 +- src/librustc/middle/intrinsicck.rs | 4 +- src/librustc/middle/lang_items.rs | 1 + src/librustc/middle/liveness.rs | 22 +- src/librustc/middle/mem_categorization.rs | 100 +- src/librustc/middle/reachable.rs | 9 +- src/librustc/middle/region.rs | 36 +- src/librustc/middle/resolve_lifetime.rs | 39 +- src/librustc/middle/stability.rs | 30 +- src/librustc/mir/mod.rs | 110 +- src/librustc/mir/tcx.rs | 15 + src/librustc/mir/traversal.rs | 2 +- src/librustc/mir/visit.rs | 72 +- src/librustc/session/config.rs | 77 +- src/librustc/session/mod.rs | 75 +- src/librustc/traits/error_reporting.rs | 53 +- src/librustc/traits/fulfill.rs | 134 +- src/librustc/traits/mod.rs | 8 +- src/librustc/traits/project.rs | 251 +- src/librustc/traits/select.rs | 382 +- src/librustc/traits/specialize/mod.rs | 8 +- .../traits/specialize/specialization_graph.rs | 6 +- src/librustc/traits/structural_impls.rs | 8 +- src/librustc/traits/trans/mod.rs | 39 +- src/librustc/ty/adjustment.rs | 4 +- src/librustc/ty/binding.rs | 35 + src/librustc/ty/context.rs | 503 +- src/librustc/ty/flags.rs | 5 +- .../ty/inhabitedness/def_id_forest.rs | 6 +- src/librustc/ty/inhabitedness/mod.rs | 4 +- src/librustc/ty/instance.rs | 26 +- src/librustc/ty/item_path.rs | 9 +- src/librustc/ty/layout.rs | 57 +- src/librustc/ty/maps.rs | 225 +- src/librustc/ty/mod.rs | 78 +- src/librustc/ty/relate.rs | 2 +- src/librustc/ty/structural_impls.rs | 4 +- src/librustc/ty/sty.rs | 6 +- src/librustc/ty/subst.rs | 4 +- src/librustc/ty/util.rs | 15 +- src/librustc/ty/wf.rs | 52 +- src/librustc/util/common.rs | 115 +- src/librustc/util/nodemap.rs | 3 + src/librustc/util/ppaux.rs | 4 +- src/librustc_allocator/expand.rs | 13 +- src/librustc_allocator/lib.rs | 13 +- src/librustc_apfloat/Cargo.toml | 11 + src/librustc_apfloat/ieee.rs | 2799 +++++++ src/librustc_apfloat/lib.rs | 692 ++ src/librustc_apfloat/ppc.rs | 461 ++ src/librustc_apfloat/tests/ieee.rs | 6891 +++++++++++++++++ src/librustc_apfloat/tests/ppc.rs | 655 ++ src/librustc_asan/lib.rs | 8 +- .../build.rs} | 6 +- src/librustc_back/dynamic_lib.rs | 36 +- src/librustc_back/lib.rs | 45 +- src/librustc_back/target/apple_ios_base.rs | 2 +- src/librustc_back/target/bitrig_base.rs | 3 +- src/librustc_back/target/dragonfly_base.rs | 3 +- src/librustc_back/target/freebsd_base.rs | 3 +- src/librustc_back/target/haiku_base.rs | 4 +- src/librustc_back/target/l4re_base.rs | 82 + src/librustc_back/target/le32_unknown_nacl.rs | 4 +- src/librustc_back/target/linux_base.rs | 3 +- src/librustc_back/target/linux_musl_base.rs | 9 +- src/librustc_back/target/mod.rs | 34 +- src/librustc_back/target/netbsd_base.rs | 3 +- src/librustc_back/target/openbsd_base.rs | 3 +- .../target/powerpc64_unknown_linux_gnu.rs | 6 +- src/librustc_back/target/redox_base.rs | 3 +- .../target/wasm32_experimental_emscripten.rs | 7 +- .../target/wasm32_unknown_emscripten.rs | 2 +- src/librustc_back/target/windows_base.rs | 4 +- src/librustc_back/target/windows_msvc_base.rs | 2 + .../target/x86_64_unknown_l4re_uclibc.rs | 31 + src/librustc_back/tempdir.rs | 9 - src/librustc_bitflags/lib.rs | 4 - src/librustc_borrowck/Cargo.toml | 1 - src/librustc_borrowck/borrowck/check_loans.rs | 101 +- .../borrowck/gather_loans/gather_moves.rs | 51 +- .../borrowck/gather_loans/lifetime.rs | 8 +- .../borrowck/gather_loans/mod.rs | 79 +- .../borrowck/gather_loans/move_error.rs | 33 +- .../borrowck/gather_loans/restrictions.rs | 8 +- src/librustc_borrowck/borrowck/mod.rs | 161 +- 
src/librustc_borrowck/borrowck/move_data.rs | 72 +- src/librustc_borrowck/diagnostics.rs | 569 +- src/librustc_borrowck/lib.rs | 10 +- src/librustc_const_eval/_match.rs | 12 +- src/librustc_const_eval/check_match.rs | 84 +- src/librustc_const_eval/eval.rs | 189 +- src/librustc_const_eval/lib.rs | 5 +- src/librustc_const_eval/pattern.rs | 115 +- src/librustc_const_math/Cargo.toml | 1 + src/librustc_const_math/float.rs | 205 +- src/librustc_const_math/int.rs | 42 - src/librustc_const_math/lib.rs | 5 +- .../accumulate_vec.rs | 2 +- src/librustc_data_structures/array_vec.rs | 8 +- src/librustc_data_structures/bitslice.rs | 2 + src/librustc_data_structures/bitvec.rs | 4 +- src/librustc_data_structures/blake2b.rs | 3 +- .../control_flow_graph/dominators/mod.rs | 83 +- .../control_flow_graph/iterate/mod.rs | 16 - .../control_flow_graph/iterate/test.rs | 20 - .../control_flow_graph/mod.rs | 3 - .../control_flow_graph/reachable/mod.rs | 62 - .../control_flow_graph/reachable/test.rs | 50 - .../control_flow_graph/test.rs | 2 +- .../control_flow_graph/transpose.rs | 64 - src/librustc_data_structures/fmt_wrap.rs | 31 - src/librustc_data_structures/fnv.rs | 66 - src/librustc_data_structures/fx.rs | 6 - src/librustc_data_structures/graph/mod.rs | 147 +- src/librustc_data_structures/graph/tests.rs | 38 - src/librustc_data_structures/indexed_set.rs | 64 + src/librustc_data_structures/indexed_vec.rs | 12 +- src/librustc_data_structures/ivar.rs | 71 - src/librustc_data_structures/lib.rs | 12 +- .../obligation_forest/mod.rs | 68 +- .../obligation_forest/node_index.rs | 2 +- src/librustc_data_structures/small_vec.rs | 2 +- src/librustc_data_structures/stable_hasher.rs | 2 +- src/librustc_data_structures/unify/mod.rs | 11 +- src/librustc_driver/Cargo.toml | 9 +- src/librustc_driver/build.rs | 17 + src/librustc_driver/driver.rs | 272 +- src/librustc_driver/lib.rs | 203 +- src/librustc_driver/pretty.rs | 111 +- src/librustc_driver/profile/mod.rs | 316 + src/librustc_driver/profile/trace.rs | 315 + src/librustc_driver/target_features.rs | 16 +- src/librustc_driver/test.rs | 6 +- src/librustc_errors/diagnostic.rs | 52 +- src/librustc_errors/diagnostic_builder.rs | 53 +- src/librustc_errors/emitter.rs | 34 +- src/librustc_errors/lib.rs | 86 +- src/librustc_errors/snippet.rs | 2 +- src/librustc_incremental/assert_dep_graph.rs | 2 +- src/librustc_incremental/build.rs | 14 + src/librustc_incremental/calculate_svh/mod.rs | 4 - src/librustc_incremental/lib.rs | 4 - .../persist/dirty_clean.rs | 14 +- src/librustc_incremental/persist/hash.rs | 4 +- src/librustc_incremental/persist/mod.rs | 1 - .../persist/preds/compress/classify/mod.rs | 2 +- .../persist/preds/compress/construct.rs | 2 +- src/librustc_incremental/persist/preds/mod.rs | 4 +- src/librustc_incremental/persist/save.rs | 4 +- .../persist/work_product.rs | 2 +- src/librustc_lint/builtin.rs | 97 +- src/librustc_lint/lib.rs | 18 +- src/librustc_lint/types.rs | 35 +- src/librustc_lint/unused.rs | 51 +- src/librustc_llvm/archive_ro.rs | 8 +- src/librustc_llvm/diagnostic.rs | 36 +- src/librustc_llvm/ffi.rs | 5 +- src/librustc_llvm/lib.rs | 19 +- src/librustc_lsan/lib.rs | 6 +- src/librustc_metadata/Cargo.toml | 1 - src/librustc_metadata/astencode.rs | 2 +- src/librustc_metadata/build.rs | 14 + src/librustc_metadata/creader.rs | 38 +- src/librustc_metadata/cstore.rs | 12 +- src/librustc_metadata/cstore_impl.rs | 26 +- src/librustc_metadata/decoder.rs | 12 +- src/librustc_metadata/encoder.rs | 67 +- src/librustc_metadata/index.rs | 26 - 
src/librustc_metadata/index_builder.rs | 4 +- src/librustc_metadata/isolated_encoder.rs | 4 +- src/librustc_metadata/lib.rs | 7 - src/librustc_metadata/locator.rs | 18 +- src/librustc_metadata/schema.rs | 8 +- src/librustc_mir/Cargo.toml | 1 + src/librustc_mir/borrow_check.rs | 1258 +++ src/librustc_mir/build/block.rs | 2 +- src/librustc_mir/build/cfg.rs | 8 +- src/librustc_mir/build/expr/as_rvalue.rs | 24 +- src/librustc_mir/build/expr/as_temp.rs | 2 +- src/librustc_mir/build/expr/into.rs | 6 +- src/librustc_mir/build/expr/stmt.rs | 6 +- src/librustc_mir/build/matches/mod.rs | 14 +- src/librustc_mir/build/matches/simplify.rs | 4 +- src/librustc_mir/build/matches/test.rs | 10 +- src/librustc_mir/build/matches/util.rs | 8 +- src/librustc_mir/build/misc.rs | 6 +- src/librustc_mir/build/mod.rs | 41 +- src/librustc_mir/build/scope.rs | 268 +- .../dataflow/drop_flag_effects.rs | 99 +- src/librustc_mir/dataflow/graphviz.rs | 50 - src/librustc_mir/dataflow/impls/borrows.rs | 180 + src/librustc_mir/dataflow/impls/mod.rs | 184 +- src/librustc_mir/dataflow/mod.rs | 285 +- .../dataflow/move_paths/abs_domain.rs | 17 +- .../dataflow/move_paths/builder.rs | 332 + src/librustc_mir/dataflow/move_paths/mod.rs | 318 +- src/librustc_mir/diagnostics.rs | 566 +- src/librustc_mir/hair/cx/block.rs | 25 +- src/librustc_mir/hair/cx/expr.rs | 124 +- src/librustc_mir/hair/cx/mod.rs | 31 +- src/librustc_mir/hair/mod.rs | 3 +- src/librustc_mir/lib.rs | 14 +- src/librustc_mir/shim.rs | 400 +- src/librustc_mir/transform/add_call_guards.rs | 77 +- src/librustc_mir/transform/add_validation.rs | 390 + .../transform/clean_end_regions.rs | 20 +- src/librustc_mir/transform/copy_prop.rs | 4 +- src/librustc_mir/transform/elaborate_drops.rs | 16 +- src/librustc_mir/transform/erase_regions.rs | 37 +- src/librustc_mir/transform/inline.rs | 25 +- src/librustc_mir/transform/instcombine.rs | 4 +- src/librustc_mir/transform/mod.rs | 15 +- src/librustc_mir/transform/nll.rs | 146 + src/librustc_mir/transform/promote_consts.rs | 20 +- src/librustc_mir/transform/qualify_consts.rs | 60 +- src/librustc_mir/transform/rustc_peek.rs | 5 +- src/librustc_mir/transform/simplify.rs | 8 +- src/librustc_mir/transform/type_check.rs | 19 +- src/librustc_mir/util/borrowck_errors.rs | 192 + src/librustc_mir/util/def_use.rs | 12 +- src/librustc_mir/util/elaborate_drops.rs | 4 +- src/librustc_mir/util/mod.rs | 1 + src/librustc_mir/util/patch.rs | 2 +- src/librustc_msan/lib.rs | 6 +- src/librustc_passes/ast_validation.rs | 44 +- src/librustc_passes/consts.rs | 80 +- src/librustc_passes/diagnostics.rs | 37 +- src/librustc_passes/hir_stats.rs | 5 + src/librustc_passes/lib.rs | 9 +- src/librustc_passes/loops.rs | 2 +- src/librustc_passes/mir_stats.rs | 43 +- src/librustc_passes/static_recursion.rs | 4 +- src/librustc_platform_intrinsics/lib.rs | 6 +- src/librustc_platform_intrinsics/powerpc.rs | 342 + src/librustc_plugin/lib.rs | 8 +- src/librustc_plugin/load.rs | 6 +- src/librustc_plugin/registry.rs | 26 +- src/librustc_privacy/lib.rs | 125 +- src/librustc_resolve/build_reduced_graph.rs | 36 +- src/librustc_resolve/check_unused.rs | 15 +- src/librustc_resolve/lib.rs | 488 +- src/librustc_resolve/macros.rs | 49 +- src/librustc_resolve/resolve_imports.rs | 101 +- src/librustc_save_analysis/Cargo.toml | 3 +- src/librustc_save_analysis/dump_visitor.rs | 155 +- src/librustc_save_analysis/json_api_dumper.rs | 66 - src/librustc_save_analysis/json_dumper.rs | 51 +- src/librustc_save_analysis/lib.rs | 236 +- src/librustc_save_analysis/sig.rs | 35 +- 
src/librustc_save_analysis/span_utils.rs | 96 +- src/librustc_trans/Cargo.toml | 3 +- src/librustc_trans/abi.rs | 24 +- src/librustc_trans/assert_module_sources.rs | 36 +- src/librustc_trans/attributes.rs | 4 +- src/librustc_trans/back/archive.rs | 15 +- src/librustc_trans/back/link.rs | 182 +- src/librustc_trans/back/linker.rs | 75 +- src/librustc_trans/back/lto.rs | 33 +- src/librustc_trans/back/symbol_export.rs | 121 +- src/librustc_trans/back/write.rs | 1346 +++- src/librustc_trans/base.rs | 623 +- src/librustc_trans/build.rs | 16 + src/librustc_trans/builder.rs | 4 +- src/librustc_trans/cabi_aarch64.rs | 8 +- src/librustc_trans/cabi_arm.rs | 60 +- src/librustc_trans/cabi_asmjs.rs | 2 +- src/librustc_trans/cabi_powerpc64.rs | 8 +- src/librustc_trans/cabi_sparc64.rs | 8 +- src/librustc_trans/cabi_x86.rs | 2 +- src/librustc_trans/callee.rs | 22 +- src/librustc_trans/collector.rs | 191 +- src/librustc_trans/common.rs | 10 +- src/librustc_trans/consts.rs | 20 + src/librustc_trans/context.rs | 119 +- .../debuginfo/create_scope_map.rs | 2 +- src/librustc_trans/debuginfo/doc.rs | 8 +- src/librustc_trans/debuginfo/metadata.rs | 46 +- src/librustc_trans/debuginfo/mod.rs | 8 +- src/librustc_trans/debuginfo/source_loc.rs | 6 +- src/librustc_trans/debuginfo/type_names.rs | 34 +- src/librustc_trans/debuginfo/utils.rs | 2 +- src/librustc_trans/intrinsic.rs | 6 +- src/librustc_trans/lib.rs | 82 +- src/librustc_trans/llvm_util.rs | 6 + src/librustc_trans/metadata.rs | 16 +- src/librustc_trans/mir/analyze.rs | 1 + src/librustc_trans/mir/block.rs | 8 +- src/librustc_trans/mir/constant.rs | 98 +- src/librustc_trans/mir/lvalue.rs | 2 +- src/librustc_trans/mir/mod.rs | 20 +- src/librustc_trans/mir/operand.rs | 6 +- src/librustc_trans/mir/rvalue.rs | 66 +- src/librustc_trans/mir/statement.rs | 1 + src/librustc_trans/monomorphize.rs | 6 + src/librustc_trans/partitioning.rs | 260 +- src/librustc_trans/time_graph.rs | 181 + src/librustc_trans/trans_item.rs | 24 +- src/librustc_trans/tvec.rs | 2 +- src/librustc_trans/type_.rs | 32 - src/librustc_trans_utils/Cargo.toml | 15 + src/librustc_trans_utils/lib.rs | 35 + src/librustc_trans_utils/link.rs | 134 + src/librustc_tsan/Cargo.toml | 1 + src/librustc_tsan/lib.rs | 9 +- src/librustc_typeck/astconv.rs | 168 +- src/librustc_typeck/check/_match.rs | 73 +- src/librustc_typeck/check/autoderef.rs | 2 +- src/librustc_typeck/check/callee.rs | 7 +- src/librustc_typeck/check/cast.rs | 41 +- src/librustc_typeck/check/closure.rs | 13 +- src/librustc_typeck/check/coercion.rs | 12 +- src/librustc_typeck/check/demand.rs | 33 +- src/librustc_typeck/check/dropck.rs | 2 +- src/librustc_typeck/check/method/confirm.rs | 139 +- src/librustc_typeck/check/method/mod.rs | 97 +- src/librustc_typeck/check/method/probe.rs | 106 +- src/librustc_typeck/check/method/suggest.rs | 104 +- src/librustc_typeck/check/mod.rs | 441 +- src/librustc_typeck/check/op.rs | 14 +- src/librustc_typeck/check/regionck.rs | 87 +- src/librustc_typeck/check/upvar.rs | 89 +- src/librustc_typeck/check/wfcheck.rs | 51 +- src/librustc_typeck/check/writeback.rs | 193 +- src/librustc_typeck/check_unused.rs | 13 +- src/librustc_typeck/collect.rs | 149 +- .../constrained_type_params.rs | 4 +- src/librustc_typeck/diagnostics.rs | 301 +- src/librustc_typeck/lib.rs | 20 +- src/librustc_typeck/variance/constraints.rs | 14 +- src/librustc_typeck/variance/terms.rs | 4 +- src/librustdoc/Cargo.toml | 15 - src/librustdoc/clean/cfg.rs | 889 +++ src/librustdoc/clean/inline.rs | 28 +- src/librustdoc/clean/mod.rs | 197 +- 
src/librustdoc/core.rs | 17 +- src/librustdoc/fold.rs | 5 +- src/librustdoc/html/format.rs | 44 +- src/librustdoc/html/highlight.rs | 6 +- src/librustdoc/html/markdown.rs | 34 +- src/librustdoc/html/render.rs | 155 +- src/librustdoc/html/static/main.js | 3 +- src/librustdoc/html/static/rustdoc.css | 42 +- src/librustdoc/html/static/styles/main.css | 57 +- src/librustdoc/html/toc.rs | 10 +- src/librustdoc/lib.rs | 7 +- src/librustdoc/passes/mod.rs | 6 + src/librustdoc/passes/propagate_doc_cfg.rs | 47 + src/librustdoc/plugins.rs | 2 +- src/librustdoc/test.rs | 42 +- src/librustdoc/visit_ast.rs | 50 +- src/librustdoc/visit_lib.rs | 2 +- src/libserialize/json.rs | 4 +- src/libserialize/lib.rs | 3 - src/libserialize/opaque.rs | 4 +- src/libserialize/serialize.rs | 4 +- src/libstd/build.rs | 5 +- src/libstd/collections/hash/map.rs | 22 +- src/libstd/collections/hash/set.rs | 61 +- src/libstd/collections/hash/table.rs | 22 +- src/libstd/error.rs | 2 +- src/libstd/ffi/c_str.rs | 2 +- src/libstd/ffi/os_str.rs | 2 +- src/libstd/fs.rs | 32 +- src/libstd/heap.rs | 3 +- src/libstd/io/buffered.rs | 22 +- src/libstd/io/cursor.rs | 4 +- src/libstd/io/error.rs | 27 +- src/libstd/io/lazy.rs | 2 +- src/libstd/io/mod.rs | 120 +- src/libstd/lib.rs | 30 +- src/libstd/macros.rs | 57 +- src/libstd/memchr.rs | 4 +- src/libstd/net/ip.rs | 2 +- src/libstd/net/tcp.rs | 4 +- src/libstd/net/udp.rs | 2 +- src/libstd/os/mod.rs | 42 +- src/libstd/os/raw.rs | 6 +- src/libstd/os/solaris/raw.rs | 2 +- src/libstd/panic.rs | 2 +- src/libstd/panicking.rs | 46 +- src/libstd/path.rs | 28 +- src/libstd/prelude/mod.rs | 1 + src/libstd/primitive_docs.rs | 257 +- src/libstd/process.rs | 27 +- src/libstd/rt.rs | 4 +- src/libstd/sync/barrier.rs | 2 +- src/libstd/sync/mpsc/blocking.rs | 2 +- src/libstd/sync/mpsc/select.rs | 4 +- src/libstd/sync/mpsc/sync.rs | 2 +- src/libstd/sync/once.rs | 4 +- src/libstd/sys/mod.rs | 30 + src/libstd/sys/redox/args.rs | 2 +- src/libstd/sys/redox/backtrace.rs | 32 - src/libstd/sys/redox/backtrace/mod.rs | 42 + src/libstd/sys/redox/backtrace/printing.rs | 11 + src/libstd/sys/redox/backtrace/tracing.rs | 106 + src/libstd/sys/redox/ext/fs.rs | 2 +- src/libstd/sys/redox/ext/io.rs | 16 + src/libstd/sys/redox/ext/mod.rs | 6 +- src/libstd/sys/redox/ext/process.rs | 2 +- src/libstd/sys/redox/ext/thread.rs | 47 + src/libstd/sys/redox/fd.rs | 4 +- src/libstd/sys/redox/net/dns/mod.rs | 34 +- src/libstd/sys/redox/net/tcp.rs | 4 +- src/libstd/sys/redox/net/udp.rs | 2 +- src/libstd/sys/redox/os.rs | 6 +- src/libstd/sys/redox/process.rs | 51 +- src/libstd/sys/redox/syscall/call.rs | 10 +- src/libstd/sys/redox/syscall/flag.rs | 6 +- src/libstd/sys/unix/args.rs | 2 +- .../sys/unix/backtrace/tracing/gcc_s.rs | 2 +- src/libstd/sys/unix/ext/fs.rs | 6 +- src/libstd/sys/unix/ext/io.rs | 62 +- src/libstd/sys/unix/ext/mod.rs | 3 +- src/libstd/sys/unix/ext/net.rs | 83 +- src/libstd/sys/unix/ext/process.rs | 2 +- src/libstd/sys/unix/fd.rs | 28 +- src/libstd/sys/unix/fs.rs | 17 +- src/libstd/sys/unix/mod.rs | 29 +- src/libstd/sys/unix/process/magenta.rs | 2 +- src/libstd/sys/unix/process/process_common.rs | 5 +- src/libstd/sys/unix/process/process_unix.rs | 5 +- src/libstd/sys/unix/rand.rs | 4 +- src/libstd/sys/unix/thread.rs | 3 +- src/libstd/sys/unix/weak.rs | 2 +- src/libstd/sys/windows/backtrace/mod.rs | 4 +- src/libstd/sys/windows/c.rs | 13 +- src/libstd/sys/windows/ext/fs.rs | 2 +- src/libstd/sys/windows/ext/io.rs | 22 + src/libstd/sys/windows/ext/mod.rs | 1 + src/libstd/sys/windows/fs.rs | 4 +- 
src/libstd/sys/windows/pipe.rs | 8 +- src/libstd/sys/windows/thread.rs | 6 +- src/libstd/sys/windows/thread_local.rs | 4 +- src/libstd/sys_common/net.rs | 12 +- src/libstd/sys_common/thread_info.rs | 4 +- src/libstd/sys_common/thread_local.rs | 2 +- src/libstd/thread/local.rs | 133 +- src/libstd/thread/mod.rs | 85 +- src/libstd/time/duration.rs | 38 +- src/libstd/time/mod.rs | 23 +- src/libstd_unicode/lib.rs | 2 - src/libstd_unicode/tables.rs | 29 +- src/libstd_unicode/u_str.rs | 2 +- src/libstd_unicode/unicode.py | 29 +- src/libsyntax/ast.rs | 97 +- src/libsyntax/attr.rs | 47 +- src/libsyntax/build.rs | 15 + src/libsyntax/codemap.rs | 32 +- src/libsyntax/config.rs | 10 +- src/libsyntax/diagnostic_list.rs | 92 +- src/libsyntax/diagnostics/plugin.rs | 5 +- src/libsyntax/ext/base.rs | 26 +- src/libsyntax/ext/build.rs | 99 +- src/libsyntax/ext/derive.rs | 1 + src/libsyntax/ext/expand.rs | 69 +- src/libsyntax/ext/placeholders.rs | 21 +- src/libsyntax/ext/quote.rs | 2 +- src/libsyntax/ext/tt/macro_parser.rs | 214 +- src/libsyntax/ext/tt/macro_rules.rs | 36 +- src/libsyntax/ext/tt/quoted.rs | 6 +- src/libsyntax/feature_gate.rs | 122 +- src/libsyntax/fold.rs | 51 +- src/libsyntax/json.rs | 21 +- src/libsyntax/lib.rs | 5 +- src/libsyntax/parse/attr.rs | 18 +- src/libsyntax/parse/classify.rs | 12 +- src/libsyntax/parse/lexer/comments.rs | 6 +- src/libsyntax/parse/lexer/mod.rs | 10 +- src/libsyntax/parse/lexer/tokentrees.rs | 2 +- src/libsyntax/parse/lexer/unicode_chars.rs | 144 +- src/libsyntax/parse/mod.rs | 29 +- src/libsyntax/parse/parser.rs | 763 +- src/libsyntax/print/pp.rs | 2 +- src/libsyntax/print/pprust.rs | 8 +- src/libsyntax/show_span.rs | 4 +- src/libsyntax/std_inject.rs | 9 +- src/libsyntax/test.rs | 52 +- src/libsyntax/tokenstream.rs | 32 + src/libsyntax_ext/Cargo.toml | 1 - src/libsyntax_ext/asm.rs | 16 +- src/libsyntax_ext/deriving/bounds.rs | 4 +- src/libsyntax_ext/deriving/clone.rs | 2 +- src/libsyntax_ext/deriving/cmp/eq.rs | 2 +- src/libsyntax_ext/deriving/cmp/ord.rs | 2 +- src/libsyntax_ext/deriving/cmp/partial_eq.rs | 4 +- src/libsyntax_ext/deriving/cmp/partial_ord.rs | 6 +- src/libsyntax_ext/deriving/debug.rs | 2 +- src/libsyntax_ext/deriving/decodable.rs | 2 +- src/libsyntax_ext/deriving/default.rs | 2 +- src/libsyntax_ext/deriving/encodable.rs | 2 +- src/libsyntax_ext/deriving/generic/mod.rs | 42 +- src/libsyntax_ext/deriving/generic/ty.rs | 15 +- src/libsyntax_ext/deriving/hash.rs | 4 +- src/libsyntax_ext/deriving/mod.rs | 2 +- src/libsyntax_ext/format.rs | 30 +- src/libsyntax_ext/format_foreign.rs | 18 +- src/libsyntax_ext/global_asm.rs | 3 +- src/libsyntax_ext/lib.rs | 18 +- src/libsyntax_ext/proc_macro_registrar.rs | 9 +- src/libsyntax_pos/hygiene.rs | 31 +- src/libsyntax_pos/lib.rs | 83 +- src/libsyntax_pos/symbol.rs | 2 +- src/libterm/lib.rs | 4 - src/libterm/terminfo/mod.rs | 2 +- src/libterm/terminfo/parser/compiled.rs | 4 +- src/libtest/lib.rs | 47 +- src/libunwind/build.rs | 4 +- src/libunwind/lib.rs | 8 +- src/rustc/Cargo.toml | 6 +- src/rustc/libc_shim/Cargo.toml | 5 +- src/rustllvm/ArchiveWrapper.cpp | 4 + src/rustllvm/PassWrapper.cpp | 26 +- src/rustllvm/RustWrapper.cpp | 227 +- src/rustllvm/llvm-rebuild-trigger | 2 +- src/rustllvm/rustllvm.h | 2 + src/stage0.txt | 6 +- .../partitioning/extern-drop-glue.rs | 4 +- .../partitioning/extern-generic.rs | 12 +- .../inlining-from-extern-crate.rs | 4 +- .../partitioning/local-drop-glue.rs | 4 +- .../partitioning/local-generic.rs | 8 +- .../partitioning/local-inlining.rs | 6 +- 
.../partitioning/local-transitive-inlining.rs | 4 +- .../partitioning/regular-modules.rs | 42 +- .../codegen-units/partitioning/statics.rs | 20 +- .../partitioning/vtable-through-const.rs | 2 +- src/test/codegen/lifetime_start_end.rs | 16 +- src/test/codegen/slice-init.rs | 74 + src/test/codegen/stack-probes.rs | 3 + .../explore-issue-38412.rs | 1 - .../plugin-as-extern-crate.rs | 1 + .../proc-macro/attribute-with-error.rs | 52 + .../proc-macro/attributes-included.rs | 32 + .../auxiliary/attribute-with-error.rs | 24 + .../auxiliary/attributes-included.rs | 130 + .../proc-macro/derive-bad.rs | 1 + .../proc-macro/expand-to-unstable-2.rs | 1 + .../proc-macro/expand-to-unstable.rs | 1 + .../proc-macro/issue-38586.rs | 1 + .../proc-macro/item-error.rs | 1 + .../proc-macro/lints_in_proc_macros.rs | 1 + .../proc-macro/proc-macro-attributes.rs | 1 + src/test/compile-fail/E0010.rs | 1 + src/test/compile-fail/E0088.rs | 11 +- src/test/compile-fail/E0254.rs | 1 + src/test/compile-fail/E0259.rs | 1 + src/test/compile-fail/E0260.rs | 1 + src/test/compile-fail/E0394.rs | 2 + src/test/compile-fail/E0559.rs | 3 +- src/test/compile-fail/E0560.rs | 1 + src/test/compile-fail/E0624.rs | 22 + src/test/compile-fail/asm-bad-clobber.rs | 1 + src/test/compile-fail/asm-in-bad-modifier.rs | 1 + src/test/compile-fail/asm-misplaced-option.rs | 1 + src/test/compile-fail/asm-out-assign-imm.rs | 1 + src/test/compile-fail/asm-out-no-modifier.rs | 1 + src/test/compile-fail/asm-out-read-uninit.rs | 1 + .../associated-types-eq-expr-path.rs | 2 +- .../auxiliary/lint_unused_extern_crate5.rs | 9 + src/test/compile-fail/bad-lint-cap2.rs | 1 + src/test/compile-fail/bad-lint-cap3.rs | 1 + .../borrowck-borrow-from-temporary.rs | 4 +- .../check-static-values-constraints.rs | 1 + .../closure-expected-type/issue-38714.rs | 26 + .../compile-fail/conflicting-repr-hints.rs | 29 +- .../compile-fail/const-eval-overflow-2.rs | 2 +- .../compile-fail/const-match-pattern-arm.rs | 2 + src/test/compile-fail/const-size_of-cycle.rs | 18 + .../compile-fail/constructor-lifetime-args.rs | 36 + .../compile-fail/dollar-crate-is-keyword-2.rs | 2 +- .../enable-unstable-lib-feature.rs | 1 + ...gate-allow-internal-unsafe-nested-macro.rs | 27 + src/test/compile-fail/feature-gate-doc_cfg.rs | 12 + .../compile-fail/feature-gate-dropck-ugeh.rs | 1 - ...eature-gate-fn_must_use-cap-lints-allow.rs | 22 + .../compile-fail/feature-gate-fn_must_use.rs | 31 + .../issue-43106-gating-of-bench.rs | 25 + .../issue-43106-gating-of-builtin-attrs.rs | 879 +++ .../issue-43106-gating-of-deprecated.rs | 31 + .../issue-43106-gating-of-derive-2.rs | 47 + .../issue-43106-gating-of-derive.rs | 44 + .../issue-43106-gating-of-inline.rs | 37 + .../issue-43106-gating-of-macro_escape.rs | 17 + .../issue-43106-gating-of-macro_use.rs | 33 + ...issue-43106-gating-of-proc_macro_derive.rs | 42 + .../issue-43106-gating-of-rustc_deprecated.rs | 39 + .../issue-43106-gating-of-stable.rs | 38 + .../issue-43106-gating-of-test.rs | 22 + .../issue-43106-gating-of-unstable.rs | 38 + .../foreign-fn-return-lifetime.rs | 16 + src/test/compile-fail/import2.rs | 2 +- src/test/compile-fail/issue-11493.rs | 4 +- src/test/compile-fail/issue-12567.rs | 8 +- src/test/compile-fail/issue-14227.rs | 2 + src/test/compile-fail/issue-16538.rs | 2 + src/test/compile-fail/issue-1697.rs | 2 +- src/test/compile-fail/issue-17450.rs | 2 +- src/test/compile-fail/issue-17545.rs | 4 +- .../compile-fail/issue-17718-const-naming.rs | 1 + .../issue-17718-constants-not-static.rs | 4 +- .../compile-fail/issue-17718-references.rs 
| 2 + src/test/compile-fail/issue-17954.rs | 25 + src/test/compile-fail/issue-18937.rs | 2 +- src/test/compile-fail/issue-19922.rs | 3 +- src/test/compile-fail/issue-27592.rs | 4 +- src/test/compile-fail/issue-28075.rs | 2 +- src/test/compile-fail/issue-28113.rs | 2 + src/test/compile-fail/issue-28324.rs | 2 + src/test/compile-fail/issue-30560.rs | 4 +- src/test/compile-fail/issue-30730.rs | 1 + src/test/compile-fail/issue-32995.rs | 8 +- src/test/compile-fail/issue-33464.rs | 9 +- src/test/compile-fail/issue-33504.rs | 19 + src/test/compile-fail/issue-35675.rs | 67 - src/test/compile-fail/issue-36116.rs | 28 +- src/test/compile-fail/issue-36881.rs | 1 + src/test/compile-fail/issue-37515.rs | 1 + .../compile-fail/issue-39211.rs} | 20 +- src/test/compile-fail/issue-39970.rs | 31 + src/test/compile-fail/issue-40510-1.rs | 23 + .../{issue-34222.rs => issue-40510-2.rs} | 15 +- src/test/compile-fail/issue-40510-3.rs | 25 + src/test/compile-fail/issue-40510-4.rs | 25 + src/test/compile-fail/issue-42796.rs | 29 + src/test/compile-fail/issue-43023.rs | 28 + src/test/compile-fail/issue-43162.rs | 17 + src/test/compile-fail/issue-43431.rs | 24 + src/test/compile-fail/issue-43733-2.rs | 39 + src/test/compile-fail/issue-43733.rs | 41 + .../issue-43784-associated-type.rs | 25 + .../compile-fail/issue-43784-supertrait.rs | 18 + src/test/compile-fail/issue-7364.rs | 1 + src/test/compile-fail/lint-forbid-cmdline.rs | 1 - src/test/compile-fail/lint-removed-allow.rs | 3 +- src/test/compile-fail/lint-removed-cmdline.rs | 2 + src/test/compile-fail/lint-renamed-allow.rs | 3 +- .../compile-fail/lint-stability-deprecated.rs | 224 +- src/test/compile-fail/lint-type-overflow2.rs | 20 +- .../compile-fail/lint-unused-extern-crate.rs | 5 +- .../compile-fail/lint-unused-mut-variables.rs | 20 + .../compile-fail/lint-uppercase-variables.rs | 1 + src/test/compile-fail/liveness-unused.rs | 1 + .../macro-reexport-malformed-1.rs | 1 + .../macro-reexport-malformed-2.rs | 1 + .../macro-reexport-malformed-3.rs | 1 + src/test/compile-fail/macro-use-bad-args-1.rs | 1 + src/test/compile-fail/macro-use-bad-args-2.rs | 1 + .../compile-fail/macro-with-seps-err-msg.rs | 3 +- .../method-call-lifetime-args-lint.rs | 97 + .../method-call-lifetime-args-subst-index.rs | 25 + .../method-call-lifetime-args-unresolved.rs | 13 + .../compile-fail/method-call-lifetime-args.rs | 82 + .../compile-fail/method-call-type-binding.rs | 2 +- src/test/compile-fail/move-out-of-array-1.rs | 2 +- src/test/compile-fail/move-out-of-slice-1.rs | 2 +- .../compile-fail/never-assign-dead-code.rs | 14 +- src/test/compile-fail/no-std-inject.rs | 1 + src/test/compile-fail/numeric-fields.rs | 3 +- src/test/compile-fail/parse-error-correct.rs | 1 + .../compile-fail/placement-expr-unstable.rs | 2 - .../compile-fail/private-inferred-type.rs | 1 + .../compile-fail/private-type-in-interface.rs | 1 + ...ions-lifetime-of-struct-or-enum-variant.rs | 8 +- src/test/compile-fail/regions-ret.rs | 4 +- .../regions-var-type-out-of-scope.rs | 4 +- .../repr-packed-contains-align.rs | 44 +- ...e-conflict-extern-crate-vs-extern-crate.rs | 1 + .../compile-fail/resolve_self_super_hint.rs | 9 +- .../stability-attribute-issue-43027.rs | 20 + ...ity-attribute-non-staged-force-unstable.rs | 16 + .../compile-fail/static-mut-not-constant.rs | 3 +- .../compile-fail/static-reference-to-fn-2.rs | 11 +- src/test/compile-fail/static-region-bound.rs | 4 +- .../compile-fail/struct-fields-too-many.rs | 1 + .../struct-path-self-type-mismatch.rs | 12 +- .../compile-fail/suggest-private-fields.rs | 3 
+- src/test/compile-fail/super-at-top-level.rs | 2 +- src/test/compile-fail/thread-local-in-ctfe.rs | 38 + src/test/compile-fail/trait-test-2.rs | 5 +- .../unboxed-closure-sugar-used-on-struct-3.rs | 12 +- .../unboxed-closure-sugar-used-on-struct.rs | 1 + src/test/compile-fail/union/union-fields.rs | 1 + .../compile-fail/unreachable-try-pattern.rs | 15 +- src/test/compile-fail/unresolved-import.rs | 6 +- src/test/compile-fail/unused-attr.rs | 2 +- src/test/compile-fail/use-from-trait-xc.rs | 6 +- src/test/compile-fail/use-from-trait.rs | 4 +- src/test/compile-fail/use-mod-4.rs | 2 +- src/test/compile-fail/useless_comment.rs | 30 + src/test/incremental/hashes/inherent_impls.rs | 2 +- src/test/incremental/hashes/trait_defs.rs | 2 +- src/test/mir-opt/README.md | 11 +- src/test/mir-opt/basic_assignment.rs | 32 +- src/test/mir-opt/box_expr.rs | 88 + src/test/mir-opt/deaggregator_test.rs | 4 +- src/test/mir-opt/end_region_4.rs | 43 +- src/test/mir-opt/end_region_5.rs | 26 +- src/test/mir-opt/end_region_6.rs | 27 +- src/test/mir-opt/end_region_7.rs | 34 +- src/test/mir-opt/end_region_8.rs | 74 +- src/test/mir-opt/issue-41110.rs | 17 +- src/test/mir-opt/issue-43457.rs | 55 + src/test/mir-opt/validate_1.rs | 77 + src/test/mir-opt/validate_2.rs | 27 + src/test/mir-opt/validate_3.rs | 50 + src/test/mir-opt/validate_4.rs | 83 + src/test/mir-opt/validate_5.rs | 63 + src/test/parse-fail/default.rs | 2 +- .../type-parameters-in-field-exprs.rs | 6 +- src/test/run-fail/issue-29798.rs | 20 + .../run-make/alloc-extern-crates/Makefile | 2 +- .../run-make/codegen-options-parsing/Makefile | 4 +- src/test/run-make/issue-35164/Makefile | 4 + src/test/run-make/issue-35164/main.rs | 15 + .../run-make/issue-35164/submodule/mod.rs | 13 + src/test/run-make/issue-37839/Makefile | 6 + src/test/run-make/issue-37893/Makefile | 6 + src/test/run-make/issue-38237/Makefile | 6 + src/test/run-make/issues-41478-43796/Makefile | 8 + src/test/run-make/issues-41478-43796/a.rs | 19 + src/test/run-make/llvm-pass/Makefile | 7 + src/test/run-make/llvm-phase/test.rs | 6 +- src/test/run-make/print-cfg/Makefile | 2 +- .../run-make/rustc-macro-dep-files/Makefile | 6 + src/test/run-make/save-analysis/Makefile | 1 - src/test/run-make/tools.mk | 2 +- src/test/run-make/treat-err-as-bug/Makefile | 5 + src/test/run-make/treat-err-as-bug/err.rs | 13 + .../auxiliary/plugin_args.rs | 7 +- src/test/run-pass-fulldeps/issue-13560.rs | 1 - src/test/run-pass-fulldeps/issue-40663.rs | 1 + .../run-pass-fulldeps/linkage-visibility.rs | 1 - .../run-pass-fulldeps/proc-macro/add-impl.rs | 1 + .../proc-macro/append-impl.rs | 1 + .../run-pass-fulldeps/proc-macro/attr-args.rs | 1 + .../proc-macro/bang-macro.rs | 1 + .../proc-macro/count_compound_ops.rs | 1 + .../run-pass-fulldeps/proc-macro/crate-var.rs | 1 + .../proc-macro/derive-same-struct.rs | 1 + .../proc-macro/hygiene_example.rs | 1 + .../proc-macro/issue-39889.rs | 1 + .../proc-macro/issue-40001.rs | 1 + .../run-pass-fulldeps/proc-macro/load-two.rs | 1 + .../proc-macro/use-reexport.rs | 1 + src/test/run-pass-valgrind/issue-44800.rs | 25 + src/test/run-pass/align-struct.rs | 18 + .../auxiliary/thread-local-extern-static.rs | 11 +- src/test/run-pass/backtrace-debuginfo-aux.rs | 2 +- src/test/run-pass/backtrace-debuginfo.rs | 4 +- src/test/run-pass/big-literals.rs | 8 +- src/test/run-pass/builtin-clone-unwind.rs | 65 + src/test/run-pass/builtin-clone.rs | 54 + src/test/run-pass/catch-expr.rs | 6 + src/test/run-pass/conditional-compile-arch.rs | 3 + src/test/run-pass/const-size_of-align_of.rs | 60 + 
src/test/run-pass/core-run-destroy.rs | 1 - .../deprecation-in-force-unstable.rs} | 10 +- .../run-pass/discriminant_value-wrapper.rs | 2 - src/test/run-pass/dynamic-drop.rs | 6 + src/test/run-pass/foreign-call-no-runtime.rs | 1 - .../run-pass/impl-trait/auxiliary/xcrate.rs | 11 + src/test/run-pass/impl-trait/xcrate.rs | 1 + src/test/run-pass/issue-13304.rs | 1 - src/test/run-pass/issue-16272.rs | 1 - src/test/run-pass/issue-20091.rs | 1 - src/test/run-pass/issue-23433.rs | 2 +- src/test/run-pass/issue-29516.rs | 28 + src/test/run-pass/issue-30756.rs | 2 +- src/test/run-pass/issue-34780.rs | 19 + src/test/run-pass/issue-37725.rs | 8 +- src/test/run-pass/issue-39467.rs | 19 + src/test/run-pass/issue-39720.rs | 31 + src/test/run-pass/issue-39827.rs | 42 + src/test/run-pass/issue-43132.rs | 74 + .../E0035.rs => run-pass/issue-43205.rs} | 11 +- src/test/run-pass/issue-43357.rs | 19 + src/test/run-pass/issue-43853.rs | 24 + src/test/run-pass/issue-43910.rs | 16 + .../lint-unknown-lints-at-crate-level.rs | 16 + src/test/run-pass/loop-break-value.rs | 6 + .../run-pass/optional_comma_in_match_arm.rs | 47 + .../process-spawn-with-unicode-params.rs | 1 - src/test/run-pass/rvalue-static-promotion.rs | 2 - .../run-pass/sigpipe-should-be-ignored.rs | 1 - src/test/run-pass/sse2.rs | 1 + src/test/run-pass/stack-probes-lto.rs | 1 + src/test/run-pass/stack-probes.rs | 1 + .../run-pass/thread-local-extern-static.rs | 18 +- src/test/run-pass/type-id-higher-rank.rs | 5 + src/test/run-pass/union/union-align.rs | 72 + src/test/run-pass/union/union-basic.rs | 7 +- src/test/run-pass/weird-exprs.rs | 5 + .../run-pass/wrong-hashset-issue-42918.rs | 38 + src/test/rustdoc/doc-cfg.rs | 47 + src/test/rustdoc/issue-19181.rs | 15 + src/test/rustdoc/issue-43701.rs | 15 + src/test/rustdoc/issue-43869.rs | 32 + src/test/rustdoc/nul-error.rs | 18 + .../E0036.rs => rustdoc/remove-duplicates.rs} | 20 +- .../ui-fulldeps/lint-plugin-cmdline-allow.rs | 1 + .../lint-plugin-cmdline-allow.stderr | 11 +- .../block-must-not-have-result-res.stderr | 2 + src/test/ui/block-result/issue-13624.stderr | 2 + src/test/ui/block-result/issue-22645.stderr | 3 + src/test/ui/block-result/issue-5500.stderr | 2 + .../unexpected-return-on-unit.stderr | 4 +- src/test/ui/borrowck/mut-borrow-in-loop.rs | 40 + .../ui/borrowck/mut-borrow-in-loop.stderr | 29 + .../ui/borrowck/mut-borrow-outside-loop.rs | 26 + .../borrowck/mut-borrow-outside-loop.stderr | 23 + src/test/ui/check_match/issue-43253.rs | 3 +- src/test/ui/check_match/issue-43253.stderr | 18 +- .../proj-outlives-region.stderr | 2 +- .../ui/compare-method/region-unrelated.stderr | 2 +- src/test/ui/const-eval/issue-43197.rs | 21 + src/test/ui/const-eval/issue-43197.stderr | 28 + src/test/ui/did_you_mean/issue-31424.stderr | 4 +- src/test/ui/did_you_mean/issue-34126.stderr | 2 +- src/test/ui/did_you_mean/issue-34337.stderr | 2 +- .../issue-36798_unknown_field.stderr | 2 + src/test/ui/did_you_mean/issue-37139.stderr | 2 +- .../issue-42599_available_fields_note.rs | 43 + .../issue-42599_available_fields_note.stderr | 30 + src/test/ui/did_you_mean/issue-42764.rs | 22 + src/test/ui/did_you_mean/issue-42764.stderr | 17 + src/test/ui/extern-const.rs | 19 + src/test/ui/extern-const.stderr | 8 + src/test/ui/issue-13483.rs | 25 + src/test/ui/issue-13483.stderr | 14 + src/test/ui/issue-22644.stderr | 66 +- src/test/ui/issue-35675.rs | 16 + src/test/ui/issue-35675.stderr | 24 +- src/test/ui/issue-35976.rs | 31 + src/test/ui/issue-35976.stderr | 11 + .../issue-40402-1.stderr | 2 +- 
src/test/ui/issue-41652/issue_41652.stderr | 1 + src/test/ui/issue-42954.stderr | 12 +- src/test/ui/issue-43806.rs | 33 + src/test/ui/issue-44023.rs | 16 + src/test/ui/issue-44023.stderr | 13 + src/test/ui/issue-44078.rs | 13 + src/test/ui/issue-44078.stderr | 10 + .../ex2b-push-no-existing-names.stderr | 23 +- .../ex3-both-anon-regions-2.rs | 15 + .../ex3-both-anon-regions-2.stderr | 10 + .../ex3-both-anon-regions-3.rs | 16 + .../ex3-both-anon-regions-3.stderr | 18 + ...x3-both-anon-regions-both-are-structs-2.rs | 19 + ...oth-anon-regions-both-are-structs-2.stderr | 10 + ...x3-both-anon-regions-both-are-structs-3.rs | 19 + ...oth-anon-regions-both-are-structs-3.stderr | 12 + .../ex3-both-anon-regions-both-are-structs.rs | 18 + ...-both-anon-regions-both-are-structs.stderr | 10 + .../ex3-both-anon-regions-one-is-struct-2.rs | 17 + ...3-both-anon-regions-one-is-struct-2.stderr | 12 + .../ex3-both-anon-regions-one-is-struct-3.rs | 17 + ...3-both-anon-regions-one-is-struct-3.stderr | 10 + .../ex3-both-anon-regions-one-is-struct.rs | 20 + ...ex3-both-anon-regions-one-is-struct.stderr | 10 + ...3-both-anon-regions-return-type-is-anon.rs | 22 + ...th-anon-regions-return-type-is-anon.stderr | 23 + .../ex3-both-anon-regions-self-is-anon.rs | 22 + .../ex3-both-anon-regions-self-is-anon.stderr | 23 + .../ex3-both-anon-regions-using-impl-items.rs | 18 + ...-both-anon-regions-using-impl-items.stderr | 10 + .../lifetime-errors/ex3-both-anon-regions.rs | 15 + .../ex3-both-anon-regions.stderr | 10 + .../ui/lifetimes/borrowck-let-suggestion.rs | 2 +- .../lifetimes/borrowck-let-suggestion.stderr | 7 +- src/test/ui/lint/fn_must_use.rs | 35 + src/test/ui/lint/fn_must_use.stderr | 18 + src/test/ui/lint/outer-forbid.rs | 8 +- src/test/ui/lint/outer-forbid.stderr | 24 +- src/test/ui/macros/format-foreign.stderr | 21 +- src/test/ui/macros/format-unused-lables.rs | 27 + .../ui/macros/format-unused-lables.stderr | 53 + .../macros/macro_path_as_generic_bound.stderr | 2 +- src/test/ui/method-call-lifetime-args-lint.rs | 31 + .../ui/method-call-lifetime-args-lint.stderr | 31 + src/test/ui/method-call-lifetime-args.rs | 25 + src/test/ui/method-call-lifetime-args.stderr | 26 + src/test/ui/mismatched_types/E0281.stderr | 2 +- src/test/ui/mismatched_types/abridged.stderr | 14 + .../mismatched_types/closure-mismatch.stderr | 2 +- .../ui/mismatched_types/issue-19109.stderr | 2 +- .../ui/mismatched_types/issue-36053-2.stderr | 2 +- .../unboxed-closures-vtable-mismatch.stderr | 2 +- src/test/ui/mut-ref.rs | 16 + src/test/ui/mut-ref.stderr | 8 + src/test/ui/path-lookahead.rs | 2 + src/test/ui/path-lookahead.stderr | 27 +- src/test/ui/print_type_sizes/nullable.rs | 16 +- src/test/ui/reachable/expr_unary.stderr | 14 +- .../ui/resolve/enums-are-namespaced-xc.stderr | 6 +- src/test/ui/resolve/issue-21221-3.stderr | 2 +- src/test/ui/resolve/issue-21221-4.stderr | 2 +- src/test/ui/resolve/issue-2356.stderr | 4 +- src/test/ui/resolve/issue-3907.stderr | 2 +- src/test/ui/resolve/issue-5035.stderr | 2 +- .../ui/resolve/privacy-struct-ctor.stderr | 12 +- .../ui/resolve/token-error-correct-3.stderr | 2 +- .../unresolved_static_type_field.stderr | 2 +- .../ui/resolve/use_suggestion_placement.rs | 36 + .../resolve/use_suggestion_placement.stderr | 49 + src/test/ui/span/E0536.stderr | 2 +- src/test/ui/span/E0537.stderr | 2 +- .../span/borrowck-let-suggestion-suffixes.rs | 8 +- .../borrowck-let-suggestion-suffixes.stderr | 30 +- .../span}/import-ty-params.rs | 7 +- src/test/ui/span/import-ty-params.stderr | 14 + 
src/test/ui/span/issue-15480.rs | 4 +- src/test/ui/span/issue-15480.stderr | 10 +- src/test/ui/span/issue-24690.rs | 7 +- src/test/ui/span/issue-24690.stderr | 50 +- src/test/ui/span/issue-35987.stderr | 2 +- src/test/ui/span/issue-7575.stderr | 3 + src/test/ui/span/macro-span-replacement.rs | 21 + .../ui/span/macro-span-replacement.stderr | 16 + src/test/ui/span/macro-ty-params.rs | 21 + src/test/ui/span/macro-ty-params.stderr | 32 + src/test/ui/span/multispan-import-lint.rs | 2 + src/test/ui/span/multispan-import-lint.stderr | 11 +- .../ui/span/non-existing-module-import.rs | 13 + .../ui/span/non-existing-module-import.stderr | 8 + .../regions-close-over-borrowed-ref-in-obj.rs | 4 +- ...ions-close-over-borrowed-ref-in-obj.stderr | 12 +- src/test/ui/span/slice-borrow.rs | 2 +- src/test/ui/span/slice-borrow.stderr | 6 +- .../span/visibility-ty-params.rs} | 4 + src/test/ui/span/visibility-ty-params.stderr | 14 + ...ype-ascription-instead-of-statement-end.rs | 20 + ...ascription-instead-of-statement-end.stderr | 16 + .../type-ascription-with-fn-call.rs | 18 + .../type-ascription-with-fn-call.stderr | 13 + src/test/ui/trait-method-private.rs | 30 + src/test/ui/trait-method-private.stderr | 12 + .../ui/type-check/assignment-in-if.stderr | 8 +- src/test/ui/union-fields.rs | 42 + src/test/ui/union-fields.stderr | 32 + src/test/ui/union-sized-field.rs | 26 + src/test/ui/union-sized-field.stderr | 32 + src/tools/build-manifest/Cargo.toml | 5 +- src/tools/build-manifest/src/main.rs | 66 +- src/tools/compiletest/src/common.rs | 2 +- src/tools/compiletest/src/errors.rs | 6 +- src/tools/compiletest/src/json.rs | 4 +- src/tools/compiletest/src/main.rs | 20 +- src/tools/compiletest/src/procsrv.rs | 106 - src/tools/compiletest/src/runtest.rs | 560 +- src/tools/error_index_generator/Cargo.toml | 3 + src/tools/remote-test-client/src/main.rs | 47 +- src/tools/rustdoc/Cargo.toml | 14 + .../rustdoc.rs => tools/rustdoc/main.rs} | 4 +- src/tools/tidy/src/deps.rs | 1 + src/tools/tidy/src/features.rs | 69 +- src/tools/tidy/src/pal.rs | 1 + src/tools/tidy/src/style.rs | 2 +- src/vendor/ar/.cargo-checksum.json | 1 + src/vendor/{crossbeam => ar}/.cargo-ok | 0 src/vendor/ar/.gitignore | 4 + src/vendor/ar/Cargo.toml | 9 + src/vendor/ar/LICENSE | 21 + src/vendor/ar/README.md | 18 + src/vendor/ar/examples/create.rs | 41 + src/vendor/ar/examples/extract.rs | 43 + src/vendor/ar/rustfmt.toml | 6 + src/vendor/ar/src/lib.rs | 769 ++ src/vendor/backtrace-sys/.cargo-checksum.json | 2 +- src/vendor/backtrace-sys/Cargo.toml | 32 +- src/vendor/backtrace-sys/Cargo.toml.orig | 18 + src/vendor/backtrace-sys/build.rs | 59 +- src/vendor/clap/.cargo-checksum.json | 2 +- src/vendor/clap/.github/ISSUE_TEMPLATE.md | 4 +- src/vendor/clap/.travis.yml | 2 +- src/vendor/clap/CHANGELOG.md | 32 +- src/vendor/clap/Cargo.toml | 68 +- src/vendor/clap/Cargo.toml.orig | 14 +- src/vendor/clap/README.md | 41 +- src/vendor/clap/src/app/help.rs | 174 +- src/vendor/clap/src/app/macros.rs | 2 +- src/vendor/clap/src/app/mod.rs | 8 +- src/vendor/clap/src/app/parser.rs | 38 +- src/vendor/clap/src/app/validator.rs | 32 +- src/vendor/clap/src/args/arg_builder/flag.rs | 4 +- .../clap/src/args/arg_builder/option.rs | 32 +- .../clap/src/args/arg_builder/positional.rs | 18 +- src/vendor/clap/src/args/arg_matches.rs | 18 +- src/vendor/clap/src/macros.rs | 2 +- src/vendor/crossbeam/.cargo-checksum.json | 1 - src/vendor/crossbeam/.gitignore | 13 - src/vendor/crossbeam/.travis.yml | 36 - src/vendor/crossbeam/CHANGELOG.md | 11 - src/vendor/crossbeam/Cargo.toml | 15 - 
src/vendor/crossbeam/README.md | 38 - src/vendor/crossbeam/scala-bench/bench.scala | 195 - src/vendor/crossbeam/src/bin/bench.rs | 165 - .../crossbeam/src/bin/extra_impls/mod.rs | 1 - .../src/bin/extra_impls/mpsc_queue.rs | 155 - src/vendor/crossbeam/src/bin/stress-msq.rs | 36 - src/vendor/crossbeam/src/lib.rs | 54 - src/vendor/crossbeam/src/mem/cache_padded.rs | 149 - src/vendor/crossbeam/src/mem/epoch/atomic.rs | 181 - src/vendor/crossbeam/src/mem/epoch/garbage.rs | 144 - src/vendor/crossbeam/src/mem/epoch/global.rs | 95 - src/vendor/crossbeam/src/mem/epoch/guard.rs | 56 - src/vendor/crossbeam/src/mem/epoch/local.rs | 38 - src/vendor/crossbeam/src/mem/epoch/mod.rs | 265 - .../crossbeam/src/mem/epoch/participant.rs | 133 - .../crossbeam/src/mem/epoch/participants.rs | 120 - src/vendor/crossbeam/src/mem/mod.rs | 9 - src/vendor/crossbeam/src/scoped.rs | 275 - src/vendor/crossbeam/src/sync/arc_cell.rs | 90 - .../crossbeam/src/sync/atomic_option.rs | 38 - src/vendor/crossbeam/src/sync/chase_lev.rs | 602 -- src/vendor/crossbeam/src/sync/mod.rs | 14 - src/vendor/crossbeam/src/sync/ms_queue.rs | 511 -- src/vendor/crossbeam/src/sync/seg_queue.rs | 251 - .../crossbeam/src/sync/treiber_stack.rs | 98 - src/vendor/libc/.cargo-checksum.json | 2 +- src/vendor/libc/Cargo.toml | 34 +- src/vendor/libc/Cargo.toml.orig | 25 + src/vendor/libc/appveyor.yml | 6 + .../aarch64-unknown-linux-gnu/Dockerfile | 2 +- .../arm-unknown-linux-gnueabihf/Dockerfile | 2 +- .../docker/i686-unknown-linux-gnu/Dockerfile | 2 +- .../docker/i686-unknown-linux-musl/Dockerfile | 2 +- .../docker/mips-unknown-linux-gnu/Dockerfile | 2 +- .../docker/mips-unknown-linux-musl/Dockerfile | 2 +- .../mips64-unknown-linux-gnuabi64/Dockerfile | 2 +- .../mipsel-unknown-linux-musl/Dockerfile | 2 +- .../powerpc-unknown-linux-gnu/Dockerfile | 2 +- .../powerpc64-unknown-linux-gnu/Dockerfile | 2 +- .../docker/s390x-unknown-linux-gnu/Dockerfile | 2 +- .../x86_64-unknown-linux-gnu/Dockerfile | 2 +- .../x86_64-unknown-linux-musl/Dockerfile | 2 +- src/vendor/libc/src/redox.rs | 10 +- src/vendor/libc/src/unix/bsd/apple/b32.rs | 33 + src/vendor/libc/src/unix/bsd/apple/b64.rs | 38 + src/vendor/libc/src/unix/bsd/apple/mod.rs | 118 +- .../src/unix/bsd/freebsdlike/dragonfly/mod.rs | 38 + .../libc/src/unix/bsd/freebsdlike/mod.rs | 17 +- src/vendor/libc/src/unix/bsd/mod.rs | 13 + .../libc/src/unix/bsd/netbsdlike/mod.rs | 17 +- .../src/unix/bsd/netbsdlike/netbsd/mod.rs | 32 + .../unix/bsd/netbsdlike/openbsdlike/mod.rs | 41 + .../bsd/netbsdlike/openbsdlike/openbsd.rs | 2 + src/vendor/libc/src/unix/haiku/mod.rs | 7 +- src/vendor/libc/src/unix/mod.rs | 15 + src/vendor/libc/src/unix/newlib/mod.rs | 6 +- .../libc/src/unix/notbsd/android/b32/arm.rs | 1 + .../libc/src/unix/notbsd/android/b32/x86.rs | 1 + .../src/unix/notbsd/android/b64/aarch64.rs | 1 + .../src/unix/notbsd/android/b64/x86_64.rs | 1 + .../libc/src/unix/notbsd/android/mod.rs | 33 +- .../libc/src/unix/notbsd/linux/mips/mips32.rs | 2 + .../libc/src/unix/notbsd/linux/mips/mips64.rs | 2 + .../libc/src/unix/notbsd/linux/mips/mod.rs | 9 +- src/vendor/libc/src/unix/notbsd/linux/mod.rs | 7 + .../src/unix/notbsd/linux/musl/b32/arm.rs | 1 + .../src/unix/notbsd/linux/musl/b32/mips.rs | 1 + .../src/unix/notbsd/linux/musl/b32/x86.rs | 1 + .../src/unix/notbsd/linux/musl/b64/mod.rs | 1 + .../libc/src/unix/notbsd/linux/musl/mod.rs | 9 +- .../src/unix/notbsd/linux/other/b32/arm.rs | 2 + .../src/unix/notbsd/linux/other/b32/mod.rs | 2 + .../unix/notbsd/linux/other/b32/powerpc.rs | 1 + 
.../src/unix/notbsd/linux/other/b32/x86.rs | 404 +- .../unix/notbsd/linux/other/b64/aarch64.rs | 3 +- .../src/unix/notbsd/linux/other/b64/mod.rs | 2 + .../unix/notbsd/linux/other/b64/powerpc64.rs | 3 +- .../unix/notbsd/linux/other/b64/sparc64.rs | 2 + .../src/unix/notbsd/linux/other/b64/x86_64.rs | 370 +- .../libc/src/unix/notbsd/linux/other/mod.rs | 8 - .../libc/src/unix/notbsd/linux/s390x.rs | 94 +- src/vendor/libc/src/unix/notbsd/mod.rs | 17 +- src/vendor/libc/src/unix/solaris/mod.rs | 7 +- src/vendor/libc/src/unix/uclibc/mod.rs | 9 +- src/vendor/lzma-sys/.cargo-checksum.json | 2 +- src/vendor/lzma-sys/Cargo.toml | 37 +- src/vendor/lzma-sys/Cargo.toml.orig | 24 + src/vendor/lzma-sys/build.rs | 18 +- src/vendor/lzma-sys/src/lib.rs | 4 + src/vendor/num-traits/.cargo-checksum.json | 2 +- src/vendor/num-traits/Cargo.toml | 3 +- src/vendor/num-traits/src/float.rs | 18 +- src/vendor/quote/.cargo-checksum.json | 1 + src/vendor/{toml-0.1.30 => quote}/.cargo-ok | 0 src/vendor/quote/Cargo.toml | 10 + .../{crossbeam => quote}/LICENSE-APACHE | 0 src/vendor/{toml-0.1.30 => quote}/LICENSE-MIT | 2 +- src/vendor/quote/README.md | 104 + src/vendor/quote/src/ident.rs | 57 + src/vendor/quote/src/lib.rs | 252 + src/vendor/quote/src/to_tokens.rs | 357 + src/vendor/quote/src/tokens.rs | 156 + src/vendor/quote/tests/test.rs | 360 + src/vendor/rls-data/.cargo-checksum.json | 2 +- src/vendor/rls-data/Cargo.toml | 15 +- src/vendor/rls-data/Cargo.toml.orig | 5 +- src/vendor/rls-data/src/config.rs | 28 + src/vendor/rls-data/src/lib.rs | 37 +- .../rustc-demangle/.cargo-checksum.json | 2 +- src/vendor/rustc-demangle/Cargo.toml | 24 +- src/vendor/rustc-demangle/Cargo.toml.orig | 12 + src/vendor/rustc-demangle/src/lib.rs | 11 + src/vendor/serde/.cargo-checksum.json | 2 +- src/vendor/serde/Cargo.toml | 16 +- src/vendor/serde/Cargo.toml.orig | 2 +- src/vendor/serde/src/de/impls.rs | 124 +- src/vendor/serde/src/lib.rs | 2 +- src/vendor/serde_derive/.cargo-checksum.json | 1 + .../empty.toml => serde_derive/.cargo-ok} | 0 src/vendor/serde_derive/Cargo.toml | 40 + src/vendor/serde_derive/Cargo.toml.orig | 24 + .../LICENSE-APACHE | 0 src/vendor/serde_derive/LICENSE-MIT | 25 + src/vendor/serde_derive/README.md | 81 + src/vendor/serde_derive/src/bound.rs | 256 + src/vendor/serde_derive/src/de.rs | 1761 +++++ src/vendor/serde_derive/src/fragment.rs | 75 + src/vendor/serde_derive/src/lib.rs | 60 + src/vendor/serde_derive/src/ser.rs | 935 +++ .../.cargo-checksum.json | 1 + src/vendor/serde_derive_internals/.cargo-ok | 0 src/vendor/serde_derive_internals/Cargo.toml | 19 + .../serde_derive_internals/LICENSE-APACHE | 201 + src/vendor/serde_derive_internals/LICENSE-MIT | 25 + src/vendor/serde_derive_internals/README.md | 81 + src/vendor/serde_derive_internals/src/ast.rs | 143 + src/vendor/serde_derive_internals/src/attr.rs | 1215 +++ src/vendor/serde_derive_internals/src/case.rs | 132 + .../serde_derive_internals/src/check.rs | 96 + src/vendor/serde_derive_internals/src/ctxt.rs | 53 + src/vendor/serde_derive_internals/src/lib.rs | 22 + src/vendor/syn/.cargo-checksum.json | 1 + src/vendor/syn/.cargo-ok | 0 src/vendor/syn/Cargo.toml | 30 + src/vendor/syn/LICENSE-APACHE | 201 + src/vendor/syn/LICENSE-MIT | 25 + src/vendor/syn/README.md | 205 + src/vendor/syn/src/aster/generics.rs | 231 + src/vendor/syn/src/aster/ident.rs | 39 + src/vendor/syn/src/aster/invoke.rs | 16 + src/vendor/syn/src/aster/lifetime.rs | 103 + src/vendor/syn/src/aster/mod.rs | 33 + src/vendor/syn/src/aster/path.rs | 327 + src/vendor/syn/src/aster/qpath.rs | 143 + 
src/vendor/syn/src/aster/ty.rs | 488 ++ src/vendor/syn/src/aster/ty_param.rs | 262 + src/vendor/syn/src/aster/where_predicate.rs | 259 + src/vendor/syn/src/attr.rs | 305 + src/vendor/syn/src/constant.rs | 180 + src/vendor/syn/src/data.rs | 297 + src/vendor/syn/src/derive.rs | 124 + src/vendor/syn/src/escape.rs | 294 + src/vendor/syn/src/expr.rs | 1721 ++++ src/vendor/syn/src/fold.rs | 942 +++ src/vendor/syn/src/generics.rs | 513 ++ src/vendor/syn/src/ident.rs | 129 + src/vendor/syn/src/item.rs | 1477 ++++ src/vendor/syn/src/krate.rs | 57 + src/vendor/syn/src/lib.rs | 211 + src/vendor/syn/src/lit.rs | 484 ++ src/vendor/syn/src/mac.rs | 430 + src/vendor/syn/src/op.rs | 192 + src/vendor/syn/src/ty.rs | 844 ++ src/vendor/syn/src/visit.rs | 778 ++ src/vendor/synom/.cargo-checksum.json | 1 + src/vendor/synom/.cargo-ok | 0 src/vendor/synom/Cargo.toml | 20 + src/vendor/synom/LICENSE-APACHE | 201 + src/vendor/synom/LICENSE-MIT | 25 + src/vendor/synom/README.md | 199 + src/vendor/synom/src/helper.rs | 543 ++ src/vendor/synom/src/lib.rs | 1225 +++ src/vendor/synom/src/space.rs | 99 + src/vendor/textwrap/.appveyor.yml | 34 +- src/vendor/textwrap/.cargo-checksum.json | 2 +- src/vendor/textwrap/.rustfmt.toml | 6 + src/vendor/textwrap/Cargo.toml | 48 +- src/vendor/textwrap/Cargo.toml.orig | 33 + src/vendor/textwrap/README.md | 32 +- src/vendor/textwrap/benches/linear.rs | 84 +- src/vendor/textwrap/examples/layout.rs | 8 +- src/vendor/textwrap/src/lib.rs | 311 +- src/vendor/toml-0.1.30/.cargo-checksum.json | 1 - src/vendor/toml-0.1.30/.travis.yml | 31 - src/vendor/toml-0.1.30/README.md | 26 - src/vendor/toml-0.1.30/examples/toml2json.rs | 57 - src/vendor/toml-0.1.30/src/decoder/mod.rs | 235 - .../src/decoder/rustc_serialize.rs | 371 - src/vendor/toml-0.1.30/src/decoder/serde.rs | 544 -- src/vendor/toml-0.1.30/src/display.rs | 201 - src/vendor/toml-0.1.30/src/encoder/mod.rs | 214 - .../src/encoder/rustc_serialize.rs | 716 -- src/vendor/toml-0.1.30/src/encoder/serde.rs | 108 - src/vendor/toml-0.1.30/src/lib.rs | 492 -- src/vendor/toml-0.1.30/src/parser.rs | 1581 ---- src/vendor/toml-0.1.30/tests/README.md | 1 - .../array-mixed-types-ints-and-floats.json | 15 - src/vendor/toml-0.1.30/tests/invalid.rs | 108 - .../array-mixed-types-arrays-and-ints.toml | 1 - .../array-mixed-types-ints-and-floats.toml | 1 - .../array-mixed-types-strings-and-ints.toml | 1 - .../invalid/datetime-malformed-no-leads.toml | 1 - .../invalid/datetime-malformed-no-secs.toml | 1 - .../invalid/datetime-malformed-no-t.toml | 1 - .../invalid/datetime-malformed-no-z.toml | 1 - .../datetime-malformed-with-milli.toml | 1 - .../tests/invalid/duplicate-key-table.toml | 5 - .../tests/invalid/duplicate-keys.toml | 2 - .../tests/invalid/duplicate-tables.toml | 2 - .../tests/invalid/empty-implicit-table.toml | 1 - .../tests/invalid/empty-table.toml | 1 - .../tests/invalid/float-no-leading-zero.toml | 2 - .../invalid/float-no-trailing-digits.toml | 2 - .../tests/invalid/key-after-array.toml | 1 - .../tests/invalid/key-after-table.toml | 1 - .../toml-0.1.30/tests/invalid/key-empty.toml | 1 - .../toml-0.1.30/tests/invalid/key-hash.toml | 1 - .../tests/invalid/key-newline.toml | 2 - .../tests/invalid/key-open-bracket.toml | 1 - .../invalid/key-single-open-bracket.toml | 1 - .../toml-0.1.30/tests/invalid/key-space.toml | 1 - .../tests/invalid/key-start-bracket.toml | 3 - .../tests/invalid/key-two-equals.toml | 1 - .../tests/invalid/string-bad-byte-escape.toml | 1 - .../tests/invalid/string-bad-escape.toml | 1 - .../tests/invalid/string-byte-escapes.toml 
| 1 - .../tests/invalid/string-no-close.toml | 1 - .../tests/invalid/table-array-implicit.toml | 14 - .../table-array-malformed-bracket.toml | 2 - .../invalid/table-array-malformed-empty.toml | 2 - .../tests/invalid/table-empty.toml | 1 - .../invalid/table-nested-brackets-close.toml | 2 - .../invalid/table-nested-brackets-open.toml | 2 - .../tests/invalid/table-whitespace.toml | 1 - .../tests/invalid/table-with-pound.toml | 2 - .../invalid/text-after-array-entries.toml | 4 - .../tests/invalid/text-after-integer.toml | 1 - .../tests/invalid/text-after-string.toml | 1 - .../tests/invalid/text-after-table.toml | 1 - .../invalid/text-before-array-separator.toml | 4 - .../tests/invalid/text-in-array.toml | 5 - src/vendor/toml-0.1.30/tests/valid.rs | 195 - .../toml-0.1.30/tests/valid/array-empty.json | 11 - .../toml-0.1.30/tests/valid/array-empty.toml | 1 - .../tests/valid/array-nospaces.json | 10 - .../tests/valid/array-nospaces.toml | 1 - .../tests/valid/arrays-hetergeneous.json | 19 - .../tests/valid/arrays-hetergeneous.toml | 1 - .../tests/valid/arrays-nested.json | 13 - .../tests/valid/arrays-nested.toml | 1 - .../toml-0.1.30/tests/valid/arrays.json | 34 - .../toml-0.1.30/tests/valid/arrays.toml | 8 - src/vendor/toml-0.1.30/tests/valid/bool.json | 4 - src/vendor/toml-0.1.30/tests/valid/bool.toml | 2 - .../tests/valid/comments-everywhere.json | 12 - .../tests/valid/comments-everywhere.toml | 24 - .../toml-0.1.30/tests/valid/datetime.json | 3 - .../toml-0.1.30/tests/valid/datetime.toml | 1 - src/vendor/toml-0.1.30/tests/valid/empty.json | 1 - .../toml-0.1.30/tests/valid/example-bom.toml | 5 - .../tests/valid/example-v0.3.0.json | 1 - .../tests/valid/example-v0.3.0.toml | 182 - .../tests/valid/example-v0.4.0.json | 1 - .../tests/valid/example-v0.4.0.toml | 235 - .../toml-0.1.30/tests/valid/example.json | 14 - .../toml-0.1.30/tests/valid/example.toml | 5 - .../toml-0.1.30/tests/valid/example2.json | 1 - .../toml-0.1.30/tests/valid/example2.toml | 47 - src/vendor/toml-0.1.30/tests/valid/float.json | 4 - src/vendor/toml-0.1.30/tests/valid/float.toml | 2 - .../toml-0.1.30/tests/valid/hard_example.json | 1 - .../toml-0.1.30/tests/valid/hard_example.toml | 33 - .../valid/implicit-and-explicit-after.json | 10 - .../valid/implicit-and-explicit-after.toml | 5 - .../valid/implicit-and-explicit-before.json | 10 - .../valid/implicit-and-explicit-before.toml | 5 - .../tests/valid/implicit-groups.json | 9 - .../tests/valid/implicit-groups.toml | 2 - .../toml-0.1.30/tests/valid/integer.json | 4 - .../toml-0.1.30/tests/valid/integer.toml | 2 - .../tests/valid/key-equals-nospace.json | 3 - .../tests/valid/key-equals-nospace.toml | 1 - .../toml-0.1.30/tests/valid/key-space.json | 3 - .../toml-0.1.30/tests/valid/key-space.toml | 1 - .../tests/valid/key-special-chars.json | 5 - .../tests/valid/key-special-chars.toml | 1 - .../tests/valid/key-with-pound.json | 3 - .../tests/valid/key-with-pound.toml | 1 - .../toml-0.1.30/tests/valid/long-float.json | 4 - .../toml-0.1.30/tests/valid/long-float.toml | 2 - .../toml-0.1.30/tests/valid/long-integer.json | 4 - .../toml-0.1.30/tests/valid/long-integer.toml | 2 - .../tests/valid/multiline-string.json | 30 - .../tests/valid/multiline-string.toml | 23 - .../tests/valid/raw-multiline-string.json | 14 - .../tests/valid/raw-multiline-string.toml | 9 - .../toml-0.1.30/tests/valid/raw-string.json | 30 - .../toml-0.1.30/tests/valid/raw-string.toml | 7 - .../toml-0.1.30/tests/valid/string-empty.json | 6 - .../toml-0.1.30/tests/valid/string-empty.toml | 1 - 
.../tests/valid/string-escapes.json | 50 - .../tests/valid/string-escapes.toml | 12 - .../tests/valid/string-simple.json | 6 - .../tests/valid/string-simple.toml | 1 - .../tests/valid/string-with-pound.json | 7 - .../tests/valid/string-with-pound.toml | 2 - .../tests/valid/table-array-implicit.json | 7 - .../tests/valid/table-array-implicit.toml | 2 - .../tests/valid/table-array-many.json | 16 - .../tests/valid/table-array-many.toml | 11 - .../tests/valid/table-array-nest.json | 18 - .../tests/valid/table-array-nest.toml | 17 - .../tests/valid/table-array-one.json | 8 - .../tests/valid/table-array-one.toml | 3 - .../toml-0.1.30/tests/valid/table-empty.json | 3 - .../toml-0.1.30/tests/valid/table-empty.toml | 1 - .../tests/valid/table-sub-empty.json | 3 - .../tests/valid/table-sub-empty.toml | 2 - .../tests/valid/table-whitespace.json | 3 - .../tests/valid/table-whitespace.toml | 1 - .../tests/valid/table-with-pound.json | 5 - .../tests/valid/table-with-pound.toml | 2 - .../tests/valid/unicode-escape.json | 4 - .../tests/valid/unicode-escape.toml | 2 - .../tests/valid/unicode-literal.json | 3 - .../tests/valid/unicode-literal.toml | 1 - src/vendor/toml/.cargo-checksum.json | 2 +- src/vendor/toml/.travis.yml | 2 +- src/vendor/toml/Cargo.toml | 45 +- .../Cargo.toml => toml/Cargo.toml.orig} | 20 +- src/vendor/toml/src/de.rs | 25 +- src/vendor/toml/src/lib.rs | 4 +- src/vendor/toml/src/ser.rs | 456 +- src/vendor/toml/src/value.rs | 12 +- src/vendor/toml/tests/pretty.rs | 308 + src/vendor/toml/tests/serde.rs | 67 + src/vendor/toml/tests/valid.rs | 58 +- .../toml/tests/valid/table-multi-empty.json | 5 + .../toml/tests/valid/table-multi-empty.toml | 5 + .../unicode-segmentation/.cargo-checksum.json | 2 +- src/vendor/unicode-segmentation/Cargo.toml | 2 +- .../unicode-segmentation/scripts/unicode.py | 12 +- .../unicode-segmentation/src/grapheme.rs | 836 +- src/vendor/unicode-segmentation/src/lib.rs | 1 + src/vendor/unicode-segmentation/src/tables.rs | 136 +- src/vendor/unicode-segmentation/src/test.rs | 4 + src/vendor/unicode-xid/.cargo-checksum.json | 1 + src/vendor/unicode-xid/.cargo-ok | 0 .../{toml-0.1.30 => unicode-xid}/.gitignore | 1 + src/vendor/unicode-xid/.travis.yml | 25 + src/vendor/unicode-xid/COPYRIGHT | 7 + src/vendor/unicode-xid/Cargo.toml | 26 + src/vendor/unicode-xid/LICENSE-APACHE | 201 + .../{crossbeam => unicode-xid}/LICENSE-MIT | 0 src/vendor/unicode-xid/README.md | 34 + src/vendor/unicode-xid/scripts/unicode.py | 187 + src/vendor/unicode-xid/src/lib.rs | 87 + src/vendor/unicode-xid/src/tables.rs | 426 + src/vendor/unicode-xid/src/tests.rs | 113 + version | 2 +- 2023 files changed, 114868 insertions(+), 42253 deletions(-) create mode 100644 src/bootstrap/bootstrap_test.py create mode 100644 src/bootstrap/builder.rs create mode 100644 src/bootstrap/cache.rs delete mode 100644 src/bootstrap/config.toml.example delete mode 100644 src/bootstrap/step.rs create mode 100644 src/bootstrap/tool.rs create mode 100755 src/ci/docker/cross/install-x86_64-redox.sh create mode 100644 src/ci/docker/disabled/aarch64-gnu/Dockerfile create mode 100644 src/ci/docker/disabled/aarch64-gnu/config create mode 100644 src/ci/docker/disabled/dist-x86_64-redox/Dockerfile rename src/ci/docker/{armhf-gnu/addentropy.c => scripts/qemu-bare-bones-addentropy.c} (100%) rename src/ci/docker/{armhf-gnu/rcS => scripts/qemu-bare-bones-rcS} (100%) rename src/{ci/docker/disabled/wasm32/node.sh => doc/book/second-edition/convert-quotes.sh} (76%) create mode 100644 src/doc/book/second-edition/nostarch/chapter19.md create 
mode 100644 src/doc/book/second-edition/nostarch/chapter20.md create mode 100644 src/doc/book/second-edition/src/img/trpl14-03.png create mode 100644 src/doc/book/second-edition/src/img/trpl14-05.png create mode 100644 src/doc/book/second-edition/src/img/trpl14-07.png create mode 100644 src/doc/book/second-edition/src/img/trpl14-10.png rename src/doc/book/second-edition/src/img/{hello.png => trpl20-01.png} (100%) create mode 100644 src/doc/book/second-edition/tools/src/bin/convert_quotes.rs create mode 100644 src/doc/nomicon/src/what-unsafe-does.md create mode 100644 src/doc/reference/README.md create mode 100644 src/doc/reference/src/glossory.md create mode 100644 src/doc/reference/src/keywords.md delete mode 100644 src/doc/rustdoc/src/in-source-directives.md delete mode 100644 src/doc/rustdoc/src/plugins.md create mode 100644 src/doc/rustdoc/src/the-doc-attribute.md create mode 100644 src/doc/unstable-book/src/language-features/doc-cfg.md delete mode 100644 src/doc/unstable-book/src/language-features/rvalue-static-promotion.md create mode 100644 src/doc/unstable-book/src/library-features/compiler-builtins-lib.md delete mode 100644 src/doc/unstable-book/src/library-features/compiler-fences.md delete mode 100644 src/doc/unstable-book/src/library-features/iterator-for-each.md create mode 100644 src/doc/unstable-book/src/library-features/string-retain.md create mode 100644 src/etc/natvis/intrinsic.natvis create mode 100644 src/etc/platform-intrinsics/powerpc.json create mode 100644 src/jemalloc/.appveyor.yml create mode 100644 src/jemalloc/.travis.yml create mode 100644 src/jemalloc/include/jemalloc/internal/ph.h create mode 100644 src/jemalloc/include/jemalloc/internal/spin.h create mode 100644 src/jemalloc/include/jemalloc/internal/witness.h create mode 100755 src/jemalloc/scripts/gen_travis.py create mode 100644 src/jemalloc/src/spin.c create mode 100644 src/jemalloc/src/witness.c create mode 100644 src/jemalloc/test/integration/chunk.sh create mode 100644 src/jemalloc/test/integration/mallocx.sh create mode 100644 src/jemalloc/test/integration/xallocx.sh create mode 100644 src/jemalloc/test/unit/a0.c create mode 100644 src/jemalloc/test/unit/arena_reset.c create mode 100644 src/jemalloc/test/unit/arena_reset.sh create mode 100644 src/jemalloc/test/unit/decay.sh create mode 100644 src/jemalloc/test/unit/extent_quantize.c create mode 100644 src/jemalloc/test/unit/fork.c create mode 100644 src/jemalloc/test/unit/junk.sh create mode 100644 src/jemalloc/test/unit/junk_alloc.sh create mode 100644 src/jemalloc/test/unit/junk_free.sh create mode 100644 src/jemalloc/test/unit/lg_chunk.sh create mode 100644 src/jemalloc/test/unit/pack.c create mode 100644 src/jemalloc/test/unit/pack.sh create mode 100644 src/jemalloc/test/unit/pages.c create mode 100644 src/jemalloc/test/unit/ph.c create mode 100644 src/jemalloc/test/unit/prof_accum.sh create mode 100644 src/jemalloc/test/unit/prof_active.sh create mode 100644 src/jemalloc/test/unit/prof_gdump.sh create mode 100644 src/jemalloc/test/unit/prof_idump.sh create mode 100644 src/jemalloc/test/unit/prof_reset.sh create mode 100644 src/jemalloc/test/unit/prof_tctx.sh create mode 100644 src/jemalloc/test/unit/prof_thread_name.sh create mode 100644 src/jemalloc/test/unit/quarantine.sh create mode 100644 src/jemalloc/test/unit/stats_print.c create mode 100644 src/jemalloc/test/unit/witness.c create mode 100644 src/jemalloc/test/unit/zero.sh delete mode 100644 src/liballoc_system/old.rs create mode 100644 
src/liblibc/ci/docker/aarch64-unknown-linux-musl/Dockerfile create mode 100644 src/liblibc/ci/docker/asmjs-unknown-emscripten/Dockerfile create mode 100644 src/liblibc/ci/docker/s390x-unknown-linux-gnu/Dockerfile create mode 100644 src/liblibc/ci/docker/wasm32-unknown-emscripten/Dockerfile create mode 100755 src/liblibc/ci/docker/wasm32-unknown-emscripten/node-wrapper.sh create mode 100644 src/liblibc/ci/docker/x86_64-rumprun-netbsd/runtest.rs delete mode 100644 src/liblibc/ci/docker/x86_64-unknown-openbsd/Dockerfile rename src/{ci/docker/scripts/dumb-init.sh => liblibc/ci/emscripten-entry.sh} (70%) mode change 100644 => 100755 create mode 100644 src/liblibc/ci/emscripten.sh mode change 100644 => 100755 src/liblibc/ci/run-docker.sh create mode 100644 src/liblibc/ci/runtest-android.rs delete mode 100644 src/liblibc/libc-test/build-generated.rs delete mode 100644 src/liblibc/libc-test/generate-files/Cargo.toml delete mode 100644 src/liblibc/libc-test/run-generated-Cargo.toml delete mode 100644 src/liblibc/libc-test/src/main-generated.rs create mode 100644 src/liblibc/libc-test/test/linux_fcntl.rs rename src/liblibc/libc-test/{src => test}/main.rs (60%) delete mode 100644 src/liblibc/src/redox.rs create mode 100644 src/liblibc/src/redox/mod.rs create mode 100644 src/liblibc/src/redox/net.rs create mode 100644 src/liblibc/src/unix/newlib/arm/mod.rs create mode 100644 src/liblibc/src/unix/newlib/mod.rs create mode 100644 src/liblibc/src/unix/notbsd/emscripten.rs delete mode 100644 src/liblibc/src/unix/notbsd/linux/musl/b32/asmjs.rs create mode 100644 src/liblibc/src/unix/uclibc/x86_64/l4re.rs create mode 100644 src/librustc/build.rs create mode 100644 src/librustc/infer/error_reporting/anon_anon_conflict.rs create mode 100644 src/librustc/infer/error_reporting/util.rs create mode 100644 src/librustc/lint/levels.rs delete mode 100644 src/librustc/lint/table.rs create mode 100644 src/librustc/ty/binding.rs create mode 100644 src/librustc_apfloat/Cargo.toml create mode 100644 src/librustc_apfloat/ieee.rs create mode 100644 src/librustc_apfloat/lib.rs create mode 100644 src/librustc_apfloat/ppc.rs create mode 100644 src/librustc_apfloat/tests/ieee.rs create mode 100644 src/librustc_apfloat/tests/ppc.rs rename src/{test/compile-fail/feature-gate-rvalue_static_promotion.rs => librustc_back/build.rs} (73%) create mode 100644 src/librustc_back/target/l4re_base.rs create mode 100644 src/librustc_back/target/x86_64_unknown_l4re_uclibc.rs delete mode 100644 src/librustc_data_structures/control_flow_graph/reachable/mod.rs delete mode 100644 src/librustc_data_structures/control_flow_graph/reachable/test.rs delete mode 100644 src/librustc_data_structures/control_flow_graph/transpose.rs delete mode 100644 src/librustc_data_structures/fmt_wrap.rs delete mode 100644 src/librustc_data_structures/fnv.rs delete mode 100644 src/librustc_data_structures/ivar.rs create mode 100644 src/librustc_driver/build.rs create mode 100644 src/librustc_driver/profile/mod.rs create mode 100644 src/librustc_driver/profile/trace.rs create mode 100644 src/librustc_incremental/build.rs create mode 100644 src/librustc_metadata/build.rs create mode 100644 src/librustc_mir/borrow_check.rs create mode 100644 src/librustc_mir/dataflow/impls/borrows.rs create mode 100644 src/librustc_mir/dataflow/move_paths/builder.rs create mode 100644 src/librustc_mir/transform/add_validation.rs create mode 100644 src/librustc_mir/transform/nll.rs create mode 100644 src/librustc_mir/util/borrowck_errors.rs create mode 100644 
src/librustc_platform_intrinsics/powerpc.rs delete mode 100644 src/librustc_save_analysis/json_api_dumper.rs create mode 100644 src/librustc_trans/build.rs create mode 100644 src/librustc_trans/time_graph.rs create mode 100644 src/librustc_trans_utils/Cargo.toml create mode 100644 src/librustc_trans_utils/lib.rs create mode 100644 src/librustc_trans_utils/link.rs create mode 100644 src/librustdoc/clean/cfg.rs create mode 100644 src/librustdoc/passes/propagate_doc_cfg.rs delete mode 100644 src/libstd/sys/redox/backtrace.rs create mode 100644 src/libstd/sys/redox/backtrace/mod.rs create mode 100644 src/libstd/sys/redox/backtrace/printing.rs create mode 100644 src/libstd/sys/redox/backtrace/tracing.rs create mode 100644 src/libstd/sys/redox/ext/thread.rs create mode 100644 src/libsyntax/build.rs create mode 100644 src/test/codegen/slice-init.rs create mode 100644 src/test/compile-fail-fulldeps/proc-macro/attribute-with-error.rs create mode 100644 src/test/compile-fail-fulldeps/proc-macro/attributes-included.rs create mode 100644 src/test/compile-fail-fulldeps/proc-macro/auxiliary/attribute-with-error.rs create mode 100644 src/test/compile-fail-fulldeps/proc-macro/auxiliary/attributes-included.rs create mode 100644 src/test/compile-fail/E0624.rs create mode 100644 src/test/compile-fail/auxiliary/lint_unused_extern_crate5.rs create mode 100644 src/test/compile-fail/closure-expected-type/issue-38714.rs create mode 100644 src/test/compile-fail/const-size_of-cycle.rs create mode 100644 src/test/compile-fail/constructor-lifetime-args.rs create mode 100644 src/test/compile-fail/feature-gate-allow-internal-unsafe-nested-macro.rs create mode 100644 src/test/compile-fail/feature-gate-doc_cfg.rs create mode 100644 src/test/compile-fail/feature-gate-fn_must_use-cap-lints-allow.rs create mode 100644 src/test/compile-fail/feature-gate-fn_must_use.rs create mode 100644 src/test/compile-fail/feature-gate/issue-43106-gating-of-bench.rs create mode 100644 src/test/compile-fail/feature-gate/issue-43106-gating-of-builtin-attrs.rs create mode 100644 src/test/compile-fail/feature-gate/issue-43106-gating-of-deprecated.rs create mode 100644 src/test/compile-fail/feature-gate/issue-43106-gating-of-derive-2.rs create mode 100644 src/test/compile-fail/feature-gate/issue-43106-gating-of-derive.rs create mode 100644 src/test/compile-fail/feature-gate/issue-43106-gating-of-inline.rs create mode 100644 src/test/compile-fail/feature-gate/issue-43106-gating-of-macro_escape.rs create mode 100644 src/test/compile-fail/feature-gate/issue-43106-gating-of-macro_use.rs create mode 100644 src/test/compile-fail/feature-gate/issue-43106-gating-of-proc_macro_derive.rs create mode 100644 src/test/compile-fail/feature-gate/issue-43106-gating-of-rustc_deprecated.rs create mode 100644 src/test/compile-fail/feature-gate/issue-43106-gating-of-stable.rs create mode 100644 src/test/compile-fail/feature-gate/issue-43106-gating-of-test.rs create mode 100644 src/test/compile-fail/feature-gate/issue-43106-gating-of-unstable.rs create mode 100644 src/test/compile-fail/foreign-fn-return-lifetime.rs create mode 100644 src/test/compile-fail/issue-17954.rs create mode 100644 src/test/compile-fail/issue-33504.rs delete mode 100644 src/test/compile-fail/issue-35675.rs rename src/{driver/driver.rs => test/compile-fail/issue-39211.rs} (61%) create mode 100644 src/test/compile-fail/issue-39970.rs create mode 100644 src/test/compile-fail/issue-40510-1.rs rename src/test/compile-fail/{issue-34222.rs => issue-40510-2.rs} (78%) create mode 100644 
src/test/compile-fail/issue-40510-3.rs create mode 100644 src/test/compile-fail/issue-40510-4.rs create mode 100644 src/test/compile-fail/issue-42796.rs create mode 100644 src/test/compile-fail/issue-43023.rs create mode 100644 src/test/compile-fail/issue-43162.rs create mode 100644 src/test/compile-fail/issue-43431.rs create mode 100644 src/test/compile-fail/issue-43733-2.rs create mode 100644 src/test/compile-fail/issue-43733.rs create mode 100644 src/test/compile-fail/issue-43784-associated-type.rs create mode 100644 src/test/compile-fail/issue-43784-supertrait.rs create mode 100644 src/test/compile-fail/method-call-lifetime-args-lint.rs create mode 100644 src/test/compile-fail/method-call-lifetime-args-subst-index.rs create mode 100644 src/test/compile-fail/method-call-lifetime-args-unresolved.rs create mode 100644 src/test/compile-fail/method-call-lifetime-args.rs create mode 100644 src/test/compile-fail/stability-attribute-issue-43027.rs create mode 100644 src/test/compile-fail/stability-attribute-non-staged-force-unstable.rs create mode 100644 src/test/compile-fail/thread-local-in-ctfe.rs rename src/test/{parse-fail => compile-fail}/unboxed-closure-sugar-used-on-struct-3.rs (69%) create mode 100644 src/test/compile-fail/useless_comment.rs create mode 100644 src/test/mir-opt/box_expr.rs create mode 100644 src/test/mir-opt/issue-43457.rs create mode 100644 src/test/mir-opt/validate_1.rs create mode 100644 src/test/mir-opt/validate_2.rs create mode 100644 src/test/mir-opt/validate_3.rs create mode 100644 src/test/mir-opt/validate_4.rs create mode 100644 src/test/mir-opt/validate_5.rs create mode 100644 src/test/run-fail/issue-29798.rs create mode 100644 src/test/run-make/issue-35164/Makefile create mode 100644 src/test/run-make/issue-35164/main.rs create mode 100644 src/test/run-make/issue-35164/submodule/mod.rs create mode 100644 src/test/run-make/issues-41478-43796/Makefile create mode 100644 src/test/run-make/issues-41478-43796/a.rs create mode 100644 src/test/run-make/treat-err-as-bug/Makefile create mode 100644 src/test/run-make/treat-err-as-bug/err.rs create mode 100644 src/test/run-pass-valgrind/issue-44800.rs create mode 100644 src/test/run-pass/builtin-clone-unwind.rs create mode 100644 src/test/run-pass/builtin-clone.rs create mode 100644 src/test/run-pass/const-size_of-align_of.rs rename src/{rustc/libc_shim/build.rs => test/run-pass/deprecation-in-force-unstable.rs} (72%) create mode 100644 src/test/run-pass/issue-29516.rs create mode 100644 src/test/run-pass/issue-34780.rs create mode 100644 src/test/run-pass/issue-39467.rs create mode 100644 src/test/run-pass/issue-39720.rs create mode 100644 src/test/run-pass/issue-39827.rs create mode 100644 src/test/run-pass/issue-43132.rs rename src/test/{compile-fail/E0035.rs => run-pass/issue-43205.rs} (72%) create mode 100644 src/test/run-pass/issue-43357.rs create mode 100644 src/test/run-pass/issue-43853.rs create mode 100644 src/test/run-pass/issue-43910.rs create mode 100644 src/test/run-pass/lint-unknown-lints-at-crate-level.rs create mode 100644 src/test/run-pass/optional_comma_in_match_arm.rs create mode 100644 src/test/run-pass/union/union-align.rs create mode 100644 src/test/run-pass/wrong-hashset-issue-42918.rs create mode 100644 src/test/rustdoc/doc-cfg.rs create mode 100644 src/test/rustdoc/issue-19181.rs create mode 100644 src/test/rustdoc/issue-43701.rs create mode 100644 src/test/rustdoc/issue-43869.rs create mode 100644 src/test/rustdoc/nul-error.rs rename src/test/{compile-fail/E0036.rs => 
rustdoc/remove-duplicates.rs} (65%) create mode 100644 src/test/ui/borrowck/mut-borrow-in-loop.rs create mode 100644 src/test/ui/borrowck/mut-borrow-in-loop.stderr create mode 100644 src/test/ui/borrowck/mut-borrow-outside-loop.rs create mode 100644 src/test/ui/borrowck/mut-borrow-outside-loop.stderr create mode 100644 src/test/ui/const-eval/issue-43197.rs create mode 100644 src/test/ui/const-eval/issue-43197.stderr create mode 100644 src/test/ui/did_you_mean/issue-42599_available_fields_note.rs create mode 100644 src/test/ui/did_you_mean/issue-42599_available_fields_note.stderr create mode 100644 src/test/ui/did_you_mean/issue-42764.rs create mode 100644 src/test/ui/did_you_mean/issue-42764.stderr create mode 100644 src/test/ui/extern-const.rs create mode 100644 src/test/ui/extern-const.stderr create mode 100644 src/test/ui/issue-13483.rs create mode 100644 src/test/ui/issue-13483.stderr create mode 100644 src/test/ui/issue-35976.rs create mode 100644 src/test/ui/issue-35976.stderr create mode 100644 src/test/ui/issue-43806.rs create mode 100644 src/test/ui/issue-44023.rs create mode 100644 src/test/ui/issue-44023.stderr create mode 100644 src/test/ui/issue-44078.rs create mode 100644 src/test/ui/issue-44078.stderr create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-2.rs create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-2.stderr create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-3.rs create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-3.stderr create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-both-are-structs-2.rs create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-both-are-structs-2.stderr create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-both-are-structs-3.rs create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-both-are-structs-3.stderr create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-both-are-structs.rs create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-both-are-structs.stderr create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-one-is-struct-2.rs create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-one-is-struct-2.stderr create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-one-is-struct-3.rs create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-one-is-struct-3.stderr create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-one-is-struct.rs create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-one-is-struct.stderr create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-return-type-is-anon.rs create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-return-type-is-anon.stderr create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-self-is-anon.rs create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-self-is-anon.stderr create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-using-impl-items.rs create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions-using-impl-items.stderr create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions.rs create mode 100644 src/test/ui/lifetime-errors/ex3-both-anon-regions.stderr create mode 100644 src/test/ui/lint/fn_must_use.rs create mode 100644 src/test/ui/lint/fn_must_use.stderr create mode 100644 src/test/ui/macros/format-unused-lables.rs create mode 100644 
src/test/ui/macros/format-unused-lables.stderr create mode 100644 src/test/ui/method-call-lifetime-args-lint.rs create mode 100644 src/test/ui/method-call-lifetime-args-lint.stderr create mode 100644 src/test/ui/method-call-lifetime-args.rs create mode 100644 src/test/ui/method-call-lifetime-args.stderr create mode 100644 src/test/ui/mut-ref.rs create mode 100644 src/test/ui/mut-ref.stderr create mode 100644 src/test/ui/resolve/use_suggestion_placement.rs create mode 100644 src/test/ui/resolve/use_suggestion_placement.stderr rename src/test/{compile-fail => ui/span}/import-ty-params.rs (77%) create mode 100644 src/test/ui/span/import-ty-params.stderr create mode 100644 src/test/ui/span/macro-span-replacement.rs create mode 100644 src/test/ui/span/macro-span-replacement.stderr create mode 100644 src/test/ui/span/macro-ty-params.rs create mode 100644 src/test/ui/span/macro-ty-params.stderr create mode 100644 src/test/ui/span/non-existing-module-import.rs create mode 100644 src/test/ui/span/non-existing-module-import.stderr rename src/test/{compile-fail/privacy/restricted/ty-params.rs => ui/span/visibility-ty-params.rs} (90%) create mode 100644 src/test/ui/span/visibility-ty-params.stderr create mode 100644 src/test/ui/suggestions/type-ascription-instead-of-statement-end.rs create mode 100644 src/test/ui/suggestions/type-ascription-instead-of-statement-end.stderr create mode 100644 src/test/ui/suggestions/type-ascription-with-fn-call.rs create mode 100644 src/test/ui/suggestions/type-ascription-with-fn-call.stderr create mode 100644 src/test/ui/trait-method-private.rs create mode 100644 src/test/ui/trait-method-private.stderr create mode 100644 src/test/ui/union-fields.rs create mode 100644 src/test/ui/union-fields.stderr create mode 100644 src/test/ui/union-sized-field.rs create mode 100644 src/test/ui/union-sized-field.stderr delete mode 100644 src/tools/compiletest/src/procsrv.rs create mode 100644 src/tools/rustdoc/Cargo.toml rename src/{rustc/rustdoc.rs => tools/rustdoc/main.rs} (83%) create mode 100644 src/vendor/ar/.cargo-checksum.json rename src/vendor/{crossbeam => ar}/.cargo-ok (100%) create mode 100644 src/vendor/ar/.gitignore create mode 100644 src/vendor/ar/Cargo.toml create mode 100644 src/vendor/ar/LICENSE create mode 100644 src/vendor/ar/README.md create mode 100644 src/vendor/ar/examples/create.rs create mode 100644 src/vendor/ar/examples/extract.rs create mode 100644 src/vendor/ar/rustfmt.toml create mode 100644 src/vendor/ar/src/lib.rs create mode 100644 src/vendor/backtrace-sys/Cargo.toml.orig delete mode 100644 src/vendor/crossbeam/.cargo-checksum.json delete mode 100644 src/vendor/crossbeam/.gitignore delete mode 100644 src/vendor/crossbeam/.travis.yml delete mode 100644 src/vendor/crossbeam/CHANGELOG.md delete mode 100644 src/vendor/crossbeam/Cargo.toml delete mode 100644 src/vendor/crossbeam/README.md delete mode 100644 src/vendor/crossbeam/scala-bench/bench.scala delete mode 100755 src/vendor/crossbeam/src/bin/bench.rs delete mode 100644 src/vendor/crossbeam/src/bin/extra_impls/mod.rs delete mode 100644 src/vendor/crossbeam/src/bin/extra_impls/mpsc_queue.rs delete mode 100644 src/vendor/crossbeam/src/bin/stress-msq.rs delete mode 100644 src/vendor/crossbeam/src/lib.rs delete mode 100644 src/vendor/crossbeam/src/mem/cache_padded.rs delete mode 100644 src/vendor/crossbeam/src/mem/epoch/atomic.rs delete mode 100644 src/vendor/crossbeam/src/mem/epoch/garbage.rs delete mode 100644 src/vendor/crossbeam/src/mem/epoch/global.rs delete mode 100644 
src/vendor/crossbeam/src/mem/epoch/guard.rs delete mode 100644 src/vendor/crossbeam/src/mem/epoch/local.rs delete mode 100644 src/vendor/crossbeam/src/mem/epoch/mod.rs delete mode 100644 src/vendor/crossbeam/src/mem/epoch/participant.rs delete mode 100644 src/vendor/crossbeam/src/mem/epoch/participants.rs delete mode 100644 src/vendor/crossbeam/src/mem/mod.rs delete mode 100644 src/vendor/crossbeam/src/scoped.rs delete mode 100644 src/vendor/crossbeam/src/sync/arc_cell.rs delete mode 100644 src/vendor/crossbeam/src/sync/atomic_option.rs delete mode 100644 src/vendor/crossbeam/src/sync/chase_lev.rs delete mode 100644 src/vendor/crossbeam/src/sync/mod.rs delete mode 100644 src/vendor/crossbeam/src/sync/ms_queue.rs delete mode 100644 src/vendor/crossbeam/src/sync/seg_queue.rs delete mode 100644 src/vendor/crossbeam/src/sync/treiber_stack.rs create mode 100644 src/vendor/libc/Cargo.toml.orig create mode 100644 src/vendor/lzma-sys/Cargo.toml.orig create mode 100644 src/vendor/quote/.cargo-checksum.json rename src/vendor/{toml-0.1.30 => quote}/.cargo-ok (100%) create mode 100644 src/vendor/quote/Cargo.toml rename src/vendor/{crossbeam => quote}/LICENSE-APACHE (100%) rename src/vendor/{toml-0.1.30 => quote}/LICENSE-MIT (95%) create mode 100644 src/vendor/quote/README.md create mode 100644 src/vendor/quote/src/ident.rs create mode 100644 src/vendor/quote/src/lib.rs create mode 100644 src/vendor/quote/src/to_tokens.rs create mode 100644 src/vendor/quote/src/tokens.rs create mode 100644 src/vendor/quote/tests/test.rs create mode 100644 src/vendor/rls-data/src/config.rs create mode 100644 src/vendor/rustc-demangle/Cargo.toml.orig create mode 100644 src/vendor/serde_derive/.cargo-checksum.json rename src/vendor/{toml-0.1.30/tests/valid/empty.toml => serde_derive/.cargo-ok} (100%) create mode 100644 src/vendor/serde_derive/Cargo.toml create mode 100644 src/vendor/serde_derive/Cargo.toml.orig rename src/vendor/{toml-0.1.30 => serde_derive}/LICENSE-APACHE (100%) create mode 100644 src/vendor/serde_derive/LICENSE-MIT create mode 100644 src/vendor/serde_derive/README.md create mode 100644 src/vendor/serde_derive/src/bound.rs create mode 100644 src/vendor/serde_derive/src/de.rs create mode 100644 src/vendor/serde_derive/src/fragment.rs create mode 100644 src/vendor/serde_derive/src/lib.rs create mode 100644 src/vendor/serde_derive/src/ser.rs create mode 100644 src/vendor/serde_derive_internals/.cargo-checksum.json create mode 100644 src/vendor/serde_derive_internals/.cargo-ok create mode 100644 src/vendor/serde_derive_internals/Cargo.toml create mode 100644 src/vendor/serde_derive_internals/LICENSE-APACHE create mode 100644 src/vendor/serde_derive_internals/LICENSE-MIT create mode 100644 src/vendor/serde_derive_internals/README.md create mode 100644 src/vendor/serde_derive_internals/src/ast.rs create mode 100644 src/vendor/serde_derive_internals/src/attr.rs create mode 100644 src/vendor/serde_derive_internals/src/case.rs create mode 100644 src/vendor/serde_derive_internals/src/check.rs create mode 100644 src/vendor/serde_derive_internals/src/ctxt.rs create mode 100644 src/vendor/serde_derive_internals/src/lib.rs create mode 100644 src/vendor/syn/.cargo-checksum.json create mode 100644 src/vendor/syn/.cargo-ok create mode 100644 src/vendor/syn/Cargo.toml create mode 100644 src/vendor/syn/LICENSE-APACHE create mode 100644 src/vendor/syn/LICENSE-MIT create mode 100644 src/vendor/syn/README.md create mode 100644 src/vendor/syn/src/aster/generics.rs create mode 100644 src/vendor/syn/src/aster/ident.rs create 
mode 100644 src/vendor/syn/src/aster/invoke.rs create mode 100644 src/vendor/syn/src/aster/lifetime.rs create mode 100644 src/vendor/syn/src/aster/mod.rs create mode 100644 src/vendor/syn/src/aster/path.rs create mode 100644 src/vendor/syn/src/aster/qpath.rs create mode 100644 src/vendor/syn/src/aster/ty.rs create mode 100644 src/vendor/syn/src/aster/ty_param.rs create mode 100644 src/vendor/syn/src/aster/where_predicate.rs create mode 100644 src/vendor/syn/src/attr.rs create mode 100644 src/vendor/syn/src/constant.rs create mode 100644 src/vendor/syn/src/data.rs create mode 100644 src/vendor/syn/src/derive.rs create mode 100644 src/vendor/syn/src/escape.rs create mode 100644 src/vendor/syn/src/expr.rs create mode 100644 src/vendor/syn/src/fold.rs create mode 100644 src/vendor/syn/src/generics.rs create mode 100644 src/vendor/syn/src/ident.rs create mode 100644 src/vendor/syn/src/item.rs create mode 100644 src/vendor/syn/src/krate.rs create mode 100644 src/vendor/syn/src/lib.rs create mode 100644 src/vendor/syn/src/lit.rs create mode 100644 src/vendor/syn/src/mac.rs create mode 100644 src/vendor/syn/src/op.rs create mode 100644 src/vendor/syn/src/ty.rs create mode 100644 src/vendor/syn/src/visit.rs create mode 100644 src/vendor/synom/.cargo-checksum.json create mode 100644 src/vendor/synom/.cargo-ok create mode 100644 src/vendor/synom/Cargo.toml create mode 100644 src/vendor/synom/LICENSE-APACHE create mode 100644 src/vendor/synom/LICENSE-MIT create mode 100644 src/vendor/synom/README.md create mode 100644 src/vendor/synom/src/helper.rs create mode 100644 src/vendor/synom/src/lib.rs create mode 100644 src/vendor/synom/src/space.rs create mode 100644 src/vendor/textwrap/.rustfmt.toml create mode 100644 src/vendor/textwrap/Cargo.toml.orig delete mode 100644 src/vendor/toml-0.1.30/.cargo-checksum.json delete mode 100644 src/vendor/toml-0.1.30/.travis.yml delete mode 100644 src/vendor/toml-0.1.30/README.md delete mode 100644 src/vendor/toml-0.1.30/examples/toml2json.rs delete mode 100644 src/vendor/toml-0.1.30/src/decoder/mod.rs delete mode 100644 src/vendor/toml-0.1.30/src/decoder/rustc_serialize.rs delete mode 100644 src/vendor/toml-0.1.30/src/decoder/serde.rs delete mode 100644 src/vendor/toml-0.1.30/src/display.rs delete mode 100644 src/vendor/toml-0.1.30/src/encoder/mod.rs delete mode 100644 src/vendor/toml-0.1.30/src/encoder/rustc_serialize.rs delete mode 100644 src/vendor/toml-0.1.30/src/encoder/serde.rs delete mode 100644 src/vendor/toml-0.1.30/src/lib.rs delete mode 100644 src/vendor/toml-0.1.30/src/parser.rs delete mode 100644 src/vendor/toml-0.1.30/tests/README.md delete mode 100644 src/vendor/toml-0.1.30/tests/invalid-encoder/array-mixed-types-ints-and-floats.json delete mode 100644 src/vendor/toml-0.1.30/tests/invalid.rs delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/array-mixed-types-arrays-and-ints.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/array-mixed-types-ints-and-floats.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/array-mixed-types-strings-and-ints.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/datetime-malformed-no-leads.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/datetime-malformed-no-secs.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/datetime-malformed-no-t.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/datetime-malformed-no-z.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/datetime-malformed-with-milli.toml delete mode 100644 
src/vendor/toml-0.1.30/tests/invalid/duplicate-key-table.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/duplicate-keys.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/duplicate-tables.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/empty-implicit-table.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/empty-table.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/float-no-leading-zero.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/float-no-trailing-digits.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/key-after-array.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/key-after-table.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/key-empty.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/key-hash.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/key-newline.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/key-open-bracket.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/key-single-open-bracket.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/key-space.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/key-start-bracket.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/key-two-equals.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/string-bad-byte-escape.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/string-bad-escape.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/string-byte-escapes.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/string-no-close.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/table-array-implicit.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/table-array-malformed-bracket.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/table-array-malformed-empty.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/table-empty.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/table-nested-brackets-close.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/table-nested-brackets-open.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/table-whitespace.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/table-with-pound.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/text-after-array-entries.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/text-after-integer.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/text-after-string.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/text-after-table.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/text-before-array-separator.toml delete mode 100644 src/vendor/toml-0.1.30/tests/invalid/text-in-array.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid.rs delete mode 100644 src/vendor/toml-0.1.30/tests/valid/array-empty.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/array-empty.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/array-nospaces.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/array-nospaces.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/arrays-hetergeneous.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/arrays-hetergeneous.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/arrays-nested.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/arrays-nested.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/arrays.json delete mode 100644 
src/vendor/toml-0.1.30/tests/valid/arrays.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/bool.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/bool.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/comments-everywhere.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/comments-everywhere.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/datetime.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/datetime.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/empty.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/example-bom.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/example-v0.3.0.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/example-v0.3.0.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/example-v0.4.0.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/example-v0.4.0.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/example.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/example.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/example2.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/example2.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/float.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/float.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/hard_example.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/hard_example.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/implicit-and-explicit-after.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/implicit-and-explicit-after.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/implicit-and-explicit-before.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/implicit-and-explicit-before.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/implicit-groups.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/implicit-groups.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/integer.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/integer.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/key-equals-nospace.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/key-equals-nospace.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/key-space.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/key-space.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/key-special-chars.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/key-special-chars.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/key-with-pound.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/key-with-pound.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/long-float.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/long-float.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/long-integer.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/long-integer.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/multiline-string.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/multiline-string.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/raw-multiline-string.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/raw-multiline-string.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/raw-string.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/raw-string.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/string-empty.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/string-empty.toml 
delete mode 100644 src/vendor/toml-0.1.30/tests/valid/string-escapes.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/string-escapes.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/string-simple.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/string-simple.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/string-with-pound.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/string-with-pound.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-array-implicit.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-array-implicit.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-array-many.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-array-many.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-array-nest.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-array-nest.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-array-one.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-array-one.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-empty.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-empty.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-sub-empty.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-sub-empty.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-whitespace.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-whitespace.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-with-pound.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/table-with-pound.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/unicode-escape.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/unicode-escape.toml delete mode 100644 src/vendor/toml-0.1.30/tests/valid/unicode-literal.json delete mode 100644 src/vendor/toml-0.1.30/tests/valid/unicode-literal.toml rename src/vendor/{toml-0.1.30/Cargo.toml => toml/Cargo.toml.orig} (59%) create mode 100644 src/vendor/toml/tests/pretty.rs create mode 100644 src/vendor/toml/tests/valid/table-multi-empty.json create mode 100644 src/vendor/toml/tests/valid/table-multi-empty.toml create mode 100644 src/vendor/unicode-xid/.cargo-checksum.json create mode 100644 src/vendor/unicode-xid/.cargo-ok rename src/vendor/{toml-0.1.30 => unicode-xid}/.gitignore (60%) create mode 100644 src/vendor/unicode-xid/.travis.yml create mode 100644 src/vendor/unicode-xid/COPYRIGHT create mode 100644 src/vendor/unicode-xid/Cargo.toml create mode 100644 src/vendor/unicode-xid/LICENSE-APACHE rename src/vendor/{crossbeam => unicode-xid}/LICENSE-MIT (100%) create mode 100644 src/vendor/unicode-xid/README.md create mode 100755 src/vendor/unicode-xid/scripts/unicode.py create mode 100644 src/vendor/unicode-xid/src/lib.rs create mode 100644 src/vendor/unicode-xid/src/tables.rs create mode 100644 src/vendor/unicode-xid/src/tests.rs diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 370cf6c0b4..c424ca7ab0 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -99,7 +99,7 @@ Before you can start building the compiler you need to configure the build for your system. In most cases, that will just mean using the defaults provided for Rust. -To change configuration, you must copy the file `src/bootstrap/config.toml.example` +To change configuration, you must copy the file `config.toml.example` to `config.toml` in the directory from which you will be running the build, and change the settings provided. 
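As an illustration (not part of the patch): a minimal sketch of the kind of `config.toml` this paragraph describes, assuming only the `[build]`/`[install]` options that the README in this patch calls out; the concrete values are placeholders.

```toml
# Hypothetical config.toml, copied from config.toml.example and trimmed down.
[build]
# Host triple to build for (placeholder value; adjust to your platform).
build = "x86_64-unknown-linux-gnu"

[install]
# Where the built toolchain should be installed (placeholder value).
prefix = "/usr/local"
```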
@@ -237,10 +237,13 @@ Some common invocations of `x.py` are: ## Pull Requests Pull requests are the primary mechanism we use to change Rust. GitHub itself -has some [great documentation][pull-requests] on using the Pull Request -feature. We use the 'fork and pull' model described there. +has some [great documentation][pull-requests] on using the Pull Request feature. +We use the "fork and pull" model [described here][development-models], where +contributors push changes to their personal fork and create pull requests to +bring those changes into the source repository. -[pull-requests]: https://help.github.com/articles/using-pull-requests/ +[pull-requests]: https://help.github.com/articles/about-pull-requests/ +[development-models]: https://help.github.com/articles/about-collaborative-development-models/ Please make pull requests against the `master` branch. @@ -289,7 +292,7 @@ been approved. The PR then enters the [merge queue][merge-queue], where @bors will run all the tests on every platform we support. If it all works out, @bors will merge your code into `master` and close the pull request. -[merge-queue]: https://buildbot.rust-lang.org/homu/queue/rust +[merge-queue]: https://buildbot2.rust-lang.org/homu/queue/rust Speaking of tests, Rust has a comprehensive test suite. More information about it can be found @@ -412,4 +415,4 @@ are: [tlgba]: http://tomlee.co/2014/04/a-more-detailed-tour-of-the-rust-compiler/ [ro]: http://www.rustaceans.org/ [rctd]: ./src/test/COMPILER_TESTS.md -[cheatsheet]: https://buildbot.rust-lang.org/homu/ +[cheatsheet]: https://buildbot2.rust-lang.org/homu/ diff --git a/COPYRIGHT b/COPYRIGHT index 3c2786b8f8..f8b637d204 100644 --- a/COPYRIGHT +++ b/COPYRIGHT @@ -6,16 +6,17 @@ terms. Longer version: -The Rust Project is copyright 2010, The Rust Project -Developers. +Copyrights in the Rust project are retained by their contributors. No +copyright assignment is required to contribute to the Rust project. -Licensed under the Apache License, Version 2.0 - or the MIT -license , -at your option. All files in the project carrying such -notice may not be copied, modified, or distributed except -according to those terms. +Some files include explicit copyright notices and/or license notices. +For full authorship information, see AUTHORS.txt and the version control +history. + +Except as otherwise noted (below and/or in individual files), Rust is +licensed under the Apache License, Version 2.0 or + or the MIT license + or , at your option. The Rust Project includes packages written by third parties. @@ -282,25 +283,3 @@ their own copyright notices and license terms: NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -* Additional copyright may be retained by contributors other - than Mozilla, the Rust Project Developers, or the parties - enumerated in this file. Such copyright can be determined - on a case-by-case basis by examining the author of each - portion of a file in the revision-control commit records - of the project, or by consulting representative comments - claiming copyright ownership for a file. - - For example, the text: - - "Copyright (c) 2011 Google Inc." - - appears in some files, and these files thereby denote - that their author and copyright-holder is Google Inc. 
- - In all such cases, the absence of explicit licensing text - indicates that the contributor chose to license their work - for distribution under identical terms to those Mozilla - has chosen for the collective work, enumerated at the top - of this file. The only difference is the retention of - copyright itself, held by the contributor. diff --git a/LICENSE-MIT b/LICENSE-MIT index 25597d5838..31aa79387f 100644 --- a/LICENSE-MIT +++ b/LICENSE-MIT @@ -1,5 +1,3 @@ -Copyright (c) 2010 The Rust Project Developers - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the diff --git a/README.md b/README.md index a1f0186107..78a9f509bb 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,7 @@ Read ["Installation"] from [The Book]. ``` > ***Note:*** Install locations can be adjusted by copying the config file - > from `./src/bootstrap/config.toml.example` to `./config.toml`, and + > from `./config.toml.example` to `./config.toml`, and > adjusting the `prefix` option under `[install]`. Various other options, such > as enabling debug information, are also supported, and are documented in > the config file. @@ -135,7 +135,7 @@ Windows build triples are: - `i686-pc-windows-msvc` - `x86_64-pc-windows-msvc` -The build triple can be specified by either specifying `--build=ABI` when +The build triple can be specified by either specifying `--build=` when invoking `x.py` commands, or by copying the `config.toml` file (as described in Building From Source), and modifying the `build` option under the `[build]` section. diff --git a/RELEASES.md b/RELEASES.md index d397ec5568..c3a7367a2e 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -1,3 +1,370 @@ +Version 1.20.0 (2017-08-31) +=========================== + +Language +-------- +- [Associated constants in traits are now stabilised.][42809] +- [A lot of macro bugs are now fixed.][42913] + +Compiler +-------- + +- [Struct fields are now properly coerced to the expected field type.][42807] +- [Enabled the wasm LLVM backend.][42571] WASM can now be built with the + `wasm32-experimental-emscripten` target. +- [Changed some of the error messages to be more helpful.][42033] +- [Added support for RELRO (RELocation Read-Only) for platforms that support + it.][43170] +- [rustc now reports the total number of errors on compilation failure.][43015] + Previously this was only the number of errors in the pass that failed. +- [Expansion in rustc has been sped up 29x.][42533] +- [Added the `msp430-none-elf` target.][43099] +- [rustc will now suggest a one-argument enum variant to fix a type mismatch when + applicable.][43178] +- [Fixed backtraces on Redox.][43228] +- [rustc now identifies different versions of the same crate when absolute paths of + different types match in an error message.][42826] + +Libraries +--------- + +- [Relaxed Debug constraints on `{HashMap,BTreeMap}::{Keys,Values}`.][42854] +- [Impl `PartialEq`, `Eq`, `PartialOrd`, `Ord`, `Debug`, `Hash` for unsized + tuples.][43011] +- [Impl `fmt::{Display, Debug}` for `Ref`, `RefMut`, `MutexGuard`, + `RwLockReadGuard`, `RwLockWriteGuard`.][42822] +- [Impl `Clone` for `DefaultHasher`.][42799] +- [Impl `Sync` for `SyncSender`.][42397] +- [Impl `FromStr` for `char`.][42271] +- [Fixed how `{f32, f64}::{is_sign_negative, is_sign_positive}` handles + NaN.][42431] +- [Allow messages in the `unimplemented!()` macro.][42155] + e.g.
`unimplemented!("Waiting for 1.21 to be stable")` +- [`pub(restricted)` is now supported in the `thread_local!` macro.][43185] +- [Upgrade to Unicode 10.0.0][42999] +- [Reimplemented `{f32, f64}::{min, max}` in Rust instead of using CMath.][42430] +- [Skip the main thread's manual stack guard on Linux][43072] +- [Iterator::nth for `ops::{Range, RangeFrom}` is now done in O(1) time][43077] +- [`#[repr(align(N))]` attribute max number is now 2^31 - 1.][43097] This was + previously 2^15. +- [`{OsStr, Path}::Display` now avoids allocations where possible][42613] + +Stabilized APIs +--------------- + +- [`CStr::into_c_string`] +- [`CString::as_c_str`] +- [`CString::into_boxed_c_str`] +- [`Chain::get_mut`] +- [`Chain::get_ref`] +- [`Chain::into_inner`] +- [`Option::get_or_insert_with`] +- [`Option::get_or_insert`] +- [`OsStr::into_os_string`] +- [`OsString::into_boxed_os_str`] +- [`Take::get_mut`] +- [`Take::get_ref`] +- [`Utf8Error::error_len`] +- [`char::EscapeDebug`] +- [`char::escape_debug`] +- [`compile_error!`] +- [`f32::from_bits`] +- [`f32::to_bits`] +- [`f64::from_bits`] +- [`f64::to_bits`] +- [`mem::ManuallyDrop`] +- [`slice::sort_unstable_by_key`] +- [`slice::sort_unstable_by`] +- [`slice::sort_unstable`] +- [`ste::from_boxed_utf8_unchecked`] +- [`str::as_bytes_mut`] +- [`str::as_bytes_mut`] +- [`str::from_utf8_mut`] +- [`str::from_utf8_unchecked_mut`] +- [`str::get_mut`] +- [`str::get_unchecked_mut`] +- [`str::get_unchecked`] +- [`str::get`] +- [`str::into_boxed_bytes`] + + +Cargo +----- +- [Cargo API token location moved from `~/.cargo/config` to + `~/.cargo/credentials`.][cargo/3978] +- [Cargo will now build `main.rs` binaries that are in sub-directories of + `src/bin`.][cargo/4214] ie. Having `src/bin/server/main.rs` and + `src/bin/client/main.rs` generates `target/debug/server` and `target/debug/client` +- [You can now specify version of a binary when installed through + `cargo install` using `--vers`.][cargo/4229] +- [Added `--no-fail-fast` flag to cargo to run all benchmarks regardless of + failure.][cargo/4248] +- [Changed the convention around which file is the crate root.][cargo/4259] +- [The `include`/`exclude` property in `Cargo.toml` now accepts gitignore paths + instead of glob patterns][cargo/4270]. Glob patterns are now deprecated. 
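As a brief illustration (not part of the upstream notes): a small Rust program exercising a few of the APIs stabilized in this release, as listed above.

```rust
fn main() {
    // `f32::to_bits` / `f32::from_bits`: round-trip a float through its raw IEEE-754 bits.
    let bits = 1.5f32.to_bits();
    assert_eq!(f32::from_bits(bits), 1.5);

    // `slice::sort_unstable`: in-place sort that avoids the temporary
    // allocation made by the stable `sort`.
    let mut v = [3, 1, 2];
    v.sort_unstable();
    assert_eq!(v, [1, 2, 3]);

    // `str::get`: fallible slicing that returns `None` instead of panicking
    // when the range is out of bounds or not on a char boundary.
    let s = "héllo";
    assert_eq!(s.get(0..1), Some("h"));
    assert_eq!(s.get(1..2), None); // range ends inside the multi-byte 'é'
}
```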
+ +Compatibility Notes +------------------- + +- [Functions with `'static` in their return types will now not be as usable as + if they were using lifetime parameters instead.][42417] +- [The reimplementation of `{f32, f64}::is_sign_{negative, positive}` now + takes the sign of NaN into account where previously didn't.][42430] + +[42033]: https://github.com/rust-lang/rust/pull/42033 +[42155]: https://github.com/rust-lang/rust/pull/42155 +[42271]: https://github.com/rust-lang/rust/pull/42271 +[42397]: https://github.com/rust-lang/rust/pull/42397 +[42417]: https://github.com/rust-lang/rust/pull/42417 +[42430]: https://github.com/rust-lang/rust/pull/42430 +[42431]: https://github.com/rust-lang/rust/pull/42431 +[42533]: https://github.com/rust-lang/rust/pull/42533 +[42571]: https://github.com/rust-lang/rust/pull/42571 +[42613]: https://github.com/rust-lang/rust/pull/42613 +[42799]: https://github.com/rust-lang/rust/pull/42799 +[42807]: https://github.com/rust-lang/rust/pull/42807 +[42809]: https://github.com/rust-lang/rust/pull/42809 +[42822]: https://github.com/rust-lang/rust/pull/42822 +[42826]: https://github.com/rust-lang/rust/pull/42826 +[42854]: https://github.com/rust-lang/rust/pull/42854 +[42913]: https://github.com/rust-lang/rust/pull/42913 +[42999]: https://github.com/rust-lang/rust/pull/42999 +[43011]: https://github.com/rust-lang/rust/pull/43011 +[43015]: https://github.com/rust-lang/rust/pull/43015 +[43072]: https://github.com/rust-lang/rust/pull/43072 +[43077]: https://github.com/rust-lang/rust/pull/43077 +[43097]: https://github.com/rust-lang/rust/pull/43097 +[43099]: https://github.com/rust-lang/rust/pull/43099 +[43170]: https://github.com/rust-lang/rust/pull/43170 +[43178]: https://github.com/rust-lang/rust/pull/43178 +[43185]: https://github.com/rust-lang/rust/pull/43185 +[43228]: https://github.com/rust-lang/rust/pull/43228 +[cargo/3978]: https://github.com/rust-lang/cargo/pull/3978 +[cargo/4214]: https://github.com/rust-lang/cargo/pull/4214 +[cargo/4229]: https://github.com/rust-lang/cargo/pull/4229 +[cargo/4248]: https://github.com/rust-lang/cargo/pull/4248 +[cargo/4259]: https://github.com/rust-lang/cargo/pull/4259 +[cargo/4270]: https://github.com/rust-lang/cargo/pull/4270 +[`CStr::into_c_string`]: https://doc.rust-lang.org/std/ffi/struct.CStr.html#method.into_c_string +[`CString::as_c_str`]: https://doc.rust-lang.org/std/ffi/struct.CString.html#method.as_c_str +[`CString::into_boxed_c_str`]: https://doc.rust-lang.org/std/ffi/struct.CString.html#method.into_boxed_c_str +[`Chain::get_mut`]: https://doc.rust-lang.org/std/io/struct.Chain.html#method.get_mut +[`Chain::get_ref`]: https://doc.rust-lang.org/std/io/struct.Chain.html#method.get_ref +[`Chain::into_inner`]: https://doc.rust-lang.org/std/io/struct.Chain.html#method.into_inner +[`Option::get_or_insert_with`]: https://doc.rust-lang.org/std/option/enum.Option.html#method.get_or_insert_with +[`Option::get_or_insert`]: https://doc.rust-lang.org/std/option/enum.Option.html#method.get_or_insert +[`OsStr::into_os_string`]: https://doc.rust-lang.org/std/ffi/struct.OsStr.html#method.into_os_string +[`OsString::into_boxed_os_str`]: https://doc.rust-lang.org/std/ffi/struct.OsString.html#method.into_boxed_os_str +[`Take::get_mut`]: https://doc.rust-lang.org/std/io/struct.Take.html#method.get_mut +[`Take::get_ref`]: https://doc.rust-lang.org/std/io/struct.Take.html#method.get_ref +[`Utf8Error::error_len`]: https://doc.rust-lang.org/std/str/struct.Utf8Error.html#method.error_len +[`char::EscapeDebug`]: 
https://doc.rust-lang.org/std/char/struct.EscapeDebug.html +[`char::escape_debug`]: https://doc.rust-lang.org/std/primitive.char.html#method.escape_debug +[`compile_error!`]: https://doc.rust-lang.org/std/macro.compile_error.html +[`f32::from_bits`]: https://doc.rust-lang.org/std/primitive.f32.html#method.from_bits +[`f32::to_bits`]: https://doc.rust-lang.org/std/primitive.f32.html#method.to_bits +[`f64::from_bits`]: https://doc.rust-lang.org/std/primitive.f64.html#method.from_bits +[`f64::to_bits`]: https://doc.rust-lang.org/std/primitive.f64.html#method.to_bits +[`mem::ManuallyDrop`]: https://doc.rust-lang.org/std/mem/union.ManuallyDrop.html +[`slice::sort_unstable_by_key`]: https://doc.rust-lang.org/std/primitive.slice.html#method.sort_unstable_by_key +[`slice::sort_unstable_by`]: https://doc.rust-lang.org/std/primitive.slice.html#method.sort_unstable_by +[`slice::sort_unstable`]: https://doc.rust-lang.org/std/primitive.slice.html#method.sort_unstable +[`ste::from_boxed_utf8_unchecked`]: https://doc.rust-lang.org/std/str/fn.from_boxed_utf8_unchecked.html +[`str::as_bytes_mut`]: https://doc.rust-lang.org/std/primitive.str.html#method.as_bytes_mut +[`str::from_utf8_mut`]: https://doc.rust-lang.org/std/str/fn.from_utf8_mut.html +[`str::from_utf8_unchecked_mut`]: https://doc.rust-lang.org/std/str/fn.from_utf8_unchecked_mut.html +[`str::get_mut`]: https://doc.rust-lang.org/std/primitive.str.html#method.get_mut +[`str::get_unchecked_mut`]: https://doc.rust-lang.org/std/primitive.str.html#method.get_unchecked_mut +[`str::get_unchecked`]: https://doc.rust-lang.org/std/primitive.str.html#method.get_unchecked +[`str::get`]: https://doc.rust-lang.org/std/primitive.str.html#method.get +[`str::into_boxed_bytes`]: https://doc.rust-lang.org/std/primitive.str.html#method.into_boxed_bytes + + +Version 1.19.0 (2017-07-20) +=========================== + +Language +-------- + +- [Numeric fields can now be used for creating tuple structs.][41145] [RFC 1506] + For example `struct Point(u32, u32); let x = Point { 0: 7, 1: 0 };`. +- [Macro recursion limit increased to 1024 from 64.][41676] +- [Added lint for detecting unused macros.][41907] +- [`loop` can now return a value with `break`.][42016] [RFC 1624] + For example: `let x = loop { break 7; };` +- [C compatible `union`s are now available.][42068] [RFC 1444] They can only + contain `Copy` types and cannot have a `Drop` implementation. + Example: `union Foo { bar: u8, baz: usize }` +- [Non capturing closures can now be coerced into `fn`s,][42162] [RFC 1558] + Example: `let foo: fn(u8) -> u8 = |v: u8| { v };` + +Compiler +-------- + +- [Add support for bootstrapping the Rust compiler toolchain on Android.][41370] +- [Change `arm-linux-androideabi` to correspond to the `armeabi` + official ABI.][41656] If you wish to continue targeting the `armeabi-v7a` ABI + you should use `--target armv7-linux-androideabi`. +- [Fixed ICE when removing a source file between compilation sessions.][41873] +- [Minor optimisation of string operations.][42037] +- [Compiler error message is now `aborting due to previous error(s)` instead of + `aborting due to N previous errors`][42150] This was previously inaccurate and + would only count certain kinds of errors. +- [The compiler now supports Visual Studio 2017][42225] +- [The compiler is now built against LLVM 4.0.1 by default][42948] +- [Added a lot][42264] of [new error codes][42302] +- [Added `target-feature=+crt-static` option][37406] [RFC 1721] Which allows + libraries with C Run-time Libraries(CRT) to be statically linked. 
+- [Fixed various ARM codegen bugs][42740] + +Libraries +--------- + +- [`String` now implements `FromIterator>` and + `Extend>`][41449] +- [`Vec` now implements `From<&mut [T]>`][41530] +- [`Box<[u8]>` now implements `From>`][41258] +- [`SplitWhitespace` now implements `Clone`][41659] +- [`[u8]::reverse` is now 5x faster and `[u16]::reverse` is now + 1.5x faster][41764] +- [`eprint!` and `eprintln!` macros added to prelude.][41192] Same as the `print!` + macros, but for printing to stderr. + +Stabilized APIs +--------------- + +- [`OsString::shrink_to_fit`] +- [`cmp::Reverse`] +- [`Command::envs`] +- [`thread::ThreadId`] + +Cargo +----- + +- [Build scripts can now add environment variables to the environment + the crate is being compiled in. + Example: `println!("cargo:rustc-env=FOO=bar");`][cargo/3929] +- [Subcommands now replace the current process rather than spawning a new + child process][cargo/3970] +- [Workspace members can now accept glob file patterns][cargo/3979] +- [Added `--all` flag to the `cargo bench` subcommand to run benchmarks of all + the members in a given workspace.][cargo/3988] +- [Updated `libssh2-sys` to 0.2.6][cargo/4008] +- [Target directory path is now in the cargo metadata][cargo/4022] +- [Cargo no longer checks out a local working directory for the + crates.io index][cargo/4026] This should provide smaller file size for the + registry, and improve cloning times, especially on Windows machines. +- [Added an `--exclude` option for excluding certain packages when using the + `--all` option][cargo/4031] +- [Cargo will now automatically retry when receiving a 5xx error + from crates.io][cargo/4032] +- [The `--features` option now accepts multiple comma or space + delimited values.][cargo/4084] +- [Added support for custom target specific runners][cargo/3954] + +Misc +---- + +- [Added `rust-windbg.cmd`][39983] for loading rust `.natvis` files in the + Windows Debugger. +- [Rust will now release XZ compressed packages][rust-installer/57] +- [rustup will now prefer to download rust packages with + XZ compression][rustup/1100] over GZip packages. +- [Added the ability to escape `#` in rust documentation][41785] By adding + additional `#`'s ie. `##` is now `#` + +Compatibility Notes +------------------- + +- [`MutexGuard` may only be `Sync` if `T` is `Sync`.][41624] +- [`-Z` flags are now no longer allowed to be used on the stable + compiler.][41751] This has been a warning for a year previous to this. +- [As a result of the `-Z` flag change, the `cargo-check` plugin no + longer works][42844]. Users should migrate to the built-in `check` + command, which has been available since 1.16. +- [Ending a float literal with `._` is now a hard error. + Example: `42._` .][41946] +- [Any use of a private `extern crate` outside of its module is now a + hard error.][36886] This was previously a warning. +- [`use ::self::foo;` is now a hard error.][36888] `self` paths are always + relative while the `::` prefix makes a path absolute, but was ignored and the + path was relative regardless. +- [Floating point constants in match patterns is now a hard error][36890] + This was previously a warning. +- [Struct or enum constants that don't derive `PartialEq` & `Eq` used + match patterns is now a hard error][36891] This was previously a warning. +- [Lifetimes named `'_` are no longer allowed.][36892] This was previously + a warning. +- [From the pound escape, lines consisting of multiple `#`s are + now visible][41785] +- [It is an error to reexport private enum variants][42460]. 
This is + known to break a number of crates that depend on an older version of + mustache. +- [On Windows, if `VCINSTALLDIR` is set incorrectly, `rustc` will try + to use it to find the linker, and the build will fail where it did + not previously][42607] + +[36886]: https://github.com/rust-lang/rust/issues/36886 +[36888]: https://github.com/rust-lang/rust/issues/36888 +[36890]: https://github.com/rust-lang/rust/issues/36890 +[36891]: https://github.com/rust-lang/rust/issues/36891 +[36892]: https://github.com/rust-lang/rust/issues/36892 +[37406]: https://github.com/rust-lang/rust/issues/37406 +[39983]: https://github.com/rust-lang/rust/pull/39983 +[41145]: https://github.com/rust-lang/rust/pull/41145 +[41192]: https://github.com/rust-lang/rust/pull/41192 +[41258]: https://github.com/rust-lang/rust/pull/41258 +[41370]: https://github.com/rust-lang/rust/pull/41370 +[41449]: https://github.com/rust-lang/rust/pull/41449 +[41530]: https://github.com/rust-lang/rust/pull/41530 +[41624]: https://github.com/rust-lang/rust/pull/41624 +[41656]: https://github.com/rust-lang/rust/pull/41656 +[41659]: https://github.com/rust-lang/rust/pull/41659 +[41676]: https://github.com/rust-lang/rust/pull/41676 +[41751]: https://github.com/rust-lang/rust/pull/41751 +[41764]: https://github.com/rust-lang/rust/pull/41764 +[41785]: https://github.com/rust-lang/rust/pull/41785 +[41873]: https://github.com/rust-lang/rust/pull/41873 +[41907]: https://github.com/rust-lang/rust/pull/41907 +[41946]: https://github.com/rust-lang/rust/pull/41946 +[42016]: https://github.com/rust-lang/rust/pull/42016 +[42037]: https://github.com/rust-lang/rust/pull/42037 +[42068]: https://github.com/rust-lang/rust/pull/42068 +[42150]: https://github.com/rust-lang/rust/pull/42150 +[42162]: https://github.com/rust-lang/rust/pull/42162 +[42225]: https://github.com/rust-lang/rust/pull/42225 +[42264]: https://github.com/rust-lang/rust/pull/42264 +[42302]: https://github.com/rust-lang/rust/pull/42302 +[42460]: https://github.com/rust-lang/rust/issues/42460 +[42607]: https://github.com/rust-lang/rust/issues/42607 +[42740]: https://github.com/rust-lang/rust/pull/42740 +[42844]: https://github.com/rust-lang/rust/issues/42844 +[42948]: https://github.com/rust-lang/rust/pull/42948 +[RFC 1444]: https://github.com/rust-lang/rfcs/pull/1444 +[RFC 1506]: https://github.com/rust-lang/rfcs/pull/1506 +[RFC 1558]: https://github.com/rust-lang/rfcs/pull/1558 +[RFC 1624]: https://github.com/rust-lang/rfcs/pull/1624 +[RFC 1721]: https://github.com/rust-lang/rfcs/pull/1721 +[`Command::envs`]: https://doc.rust-lang.org/std/process/struct.Command.html#method.envs +[`OsString::shrink_to_fit`]: https://doc.rust-lang.org/std/ffi/struct.OsString.html#method.shrink_to_fit +[`cmp::Reverse`]: https://doc.rust-lang.org/std/cmp/struct.Reverse.html +[`thread::ThreadId`]: https://doc.rust-lang.org/std/thread/struct.ThreadId.html +[cargo/3929]: https://github.com/rust-lang/cargo/pull/3929 +[cargo/3954]: https://github.com/rust-lang/cargo/pull/3954 +[cargo/3970]: https://github.com/rust-lang/cargo/pull/3970 +[cargo/3979]: https://github.com/rust-lang/cargo/pull/3979 +[cargo/3988]: https://github.com/rust-lang/cargo/pull/3988 +[cargo/4008]: https://github.com/rust-lang/cargo/pull/4008 +[cargo/4022]: https://github.com/rust-lang/cargo/pull/4022 +[cargo/4026]: https://github.com/rust-lang/cargo/pull/4026 +[cargo/4031]: https://github.com/rust-lang/cargo/pull/4031 +[cargo/4032]: https://github.com/rust-lang/cargo/pull/4032 +[cargo/4084]: https://github.com/rust-lang/cargo/pull/4084 
+[rust-installer/57]: https://github.com/rust-lang/rust-installer/pull/57 +[rustup/1100]: https://github.com/rust-lang-nursery/rustup.rs/pull/1100 + + Version 1.18.0 (2017-06-08) =========================== @@ -530,6 +897,9 @@ Compatibility Notes * [Ctrl-Z returns from `Stdin.read()` when reading from the console on Windows][38274] * [Clean up semantics of `self` in an import list][38313] +* Reimplemented lifetime elision. This change was almost entirely compatible + with existing code, but it did close a number of small bugs and loopholes, + as well as being more accepting in some other [cases][41105]. [37057]: https://github.com/rust-lang/rust/pull/37057 [37761]: https://github.com/rust-lang/rust/pull/37761 @@ -564,6 +934,7 @@ Compatibility Notes [39048]: https://github.com/rust-lang/rust/pull/39048 [39282]: https://github.com/rust-lang/rust/pull/39282 [39379]: https://github.com/rust-lang/rust/pull/39379 +[41105]: https://github.com/rust-lang/rust/issues/41105 [`<*const T>::wrapping_offset`]: https://doc.rust-lang.org/std/primitive.pointer.html#method.wrapping_offset [`<*mut T>::wrapping_offset`]: https://doc.rust-lang.org/std/primitive.pointer.html#method.wrapping_offset [`Duration::checked_add`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.checked_add @@ -1495,7 +1866,7 @@ Tooling * [Test binaries now support a `--test-threads` argument to specify the number of threads used to run tests, and which acts the same as the - `RUST_TEST_THREADS` environment variable](https://github.com/rust-lang/rust/pull/35414) + `RUST_TEST_THREADS` environment variable](https://github.com/rust-lang/rust/pull/35414) * [The test runner now emits a warning when tests run over 60 seconds](https://github.com/rust-lang/rust/pull/35405) * [rustdoc: Fix methods in search results](https://github.com/rust-lang/rust/pull/34752) * [`rust-lldb` warns about unsupported versions of LLDB](https://github.com/rust-lang/rust/pull/34646) diff --git a/configure b/configure index e08bcc0282..664b473b2c 100755 --- a/configure +++ b/configure @@ -437,7 +437,6 @@ opt local-rust 0 "use an installed rustc rather than downloading a snapshot" opt local-rebuild 0 "assume local-rust matches the current version, for rebuilds; implies local-rust, and is implied if local-rust already matches the current version" opt llvm-static-stdcpp 0 "statically link to libstdc++ for LLVM" opt llvm-link-shared 0 "prefer shared linking to LLVM (llvm-config --link-shared)" -opt llvm-clean-rebuild 0 "delete LLVM build directory on rebuild" opt rpath 1 "build rpaths into rustc itself" opt stage0-landing-pads 1 "enable landing pads during bootstrap with stage0" # This is used by the automation to produce single-target nightlies @@ -490,6 +489,7 @@ valopt musl-root-armhf "" "arm-unknown-linux-musleabihf install directory" valopt musl-root-armv7 "" "armv7-unknown-linux-musleabihf install directory" valopt extra-filename "" "Additional data that is hashed and passed to the -C extra-filename flag" valopt qemu-armhf-rootfs "" "rootfs in qemu testing, you probably don't want to use this" +valopt qemu-aarch64-rootfs "" "rootfs in qemu testing, you probably don't want to use this" valopt experimental-targets "" "experimental LLVM targets to build" if [ -e ${CFG_SRC_DIR}.git ] @@ -560,8 +560,8 @@ case "$CFG_RELEASE_CHANNEL" in *-pc-windows-gnu) ;; *) - CFG_ENABLE_DEBUGINFO_LINES=1 - CFG_ENABLE_DEBUGINFO_ONLY_STD=1 + enable_if_not_disabled debuginfo-lines + enable_if_not_disabled debuginfo-only-std ;; esac @@ -572,8 +572,8 @@ case 
"$CFG_RELEASE_CHANNEL" in *-pc-windows-gnu) ;; *) - CFG_ENABLE_DEBUGINFO_LINES=1 - CFG_ENABLE_DEBUGINFO_ONLY_STD=1 + enable_if_not_disabled debuginfo-lines + enable_if_not_disabled debuginfo-only-std ;; esac ;; diff --git a/src/Cargo.lock b/src/Cargo.lock index bcec823403..19f3042c70 100644 --- a/src/Cargo.lock +++ b/src/Cargo.lock @@ -65,6 +65,11 @@ name = "ansi_term" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "ar" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "arena" version = "0.0.0" @@ -75,7 +80,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -84,24 +89,37 @@ name = "backtrace" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "backtrace-sys 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", + "backtrace-sys 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-demangle 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "backtrace-sys" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "bin_lib" +version = "0.1.0" + +[[package]] +name = "bin_lib_no_cfg_test" +version = "0.1.0" + +[[package]] +name = "bitflags" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "bitflags" version = "0.8.2" @@ -121,12 +139,19 @@ dependencies = [ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", - "toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + 
"toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "borrow_error" +version = "0.1.0" + [[package]] name = "bufstream" version = "0.1.3" @@ -136,8 +161,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" name = "build-manifest" version = "0.1.0" dependencies = [ - "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", - "toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -149,15 +175,16 @@ dependencies = [ [[package]] name = "cargo" -version = "0.21.0" +version = "0.22.0" dependencies = [ "advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "cargotest 0.1.0", - "crates-io 0.10.0", + "core-foundation 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", + "crates-io 0.11.0", "crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", - "curl 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "docopt 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "error-chain 0.11.0-rc.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -169,26 +196,29 @@ dependencies = [ "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "home 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "ignore 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "jobserver 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "libgit2-sys 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl 0.9.17 (registry+https://github.com/rust-lang/crates.io-index)", "psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", "serde_ignored 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.2 
(registry+https://github.com/rust-lang/crates.io-index)", "shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "tar 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "termcolor 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "toml 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -197,22 +227,16 @@ dependencies = [ name = "cargotest" version = "0.1.0" dependencies = [ - "bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "cargo 0.21.0", + "cargo 0.22.0", "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)", "git2 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "tar 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", - "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -226,7 +250,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "clap" -version = "2.25.0" +version = "2.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -234,8 +258,8 @@ dependencies = [ "bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "textwrap 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "textwrap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-segmentation 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "yaml-rust 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -273,11 +297,15 @@ dependencies = [ "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 
(registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "completion" +version = "0.1.0" + [[package]] name = "core" version = "0.0.0" @@ -285,14 +313,31 @@ dependencies = [ "rand 0.0.0", ] +[[package]] +name = "core-foundation" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "core-foundation-sys 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "core-foundation-sys" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "crates-io" -version = "0.10.0" +version = "0.11.0" dependencies = [ - "curl 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "error-chain 0.11.0-rc.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -304,14 +349,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "curl" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "curl-sys 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)", - "socket2 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.17 (registry+https://github.com/rust-lang/crates.io-index)", + "socket2 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -321,9 +366,9 @@ version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.16 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.17 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -338,6 +383,15 @@ dependencies = [ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "derive-new" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + 
"syn 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "diff" version = "0.1.10" @@ -350,8 +404,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -360,6 +414,23 @@ name = "dtoa" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "enum_primitive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "env_logger" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "env_logger" version = "0.4.3" @@ -388,21 +459,36 @@ dependencies = [ [[package]] name = "error_index_generator" version = "0.0.0" +dependencies = [ + "rustdoc 0.0.0", +] [[package]] name = "filetime" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "find_all_refs" +version = "0.1.0" + +[[package]] +name = "find_all_refs_no_cfg_test" +version = "0.1.0" + +[[package]] +name = "find_impls" +version = "0.1.0" + [[package]] name = "flate2" version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -410,6 +496,11 @@ dependencies = [ name = "fmt_macros" version = "0.0.0" +[[package]] +name = "fnv" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "foreign-types" version = "0.2.0" @@ -421,10 +512,15 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "futures" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "gcc" version = "0.3.51" @@ -441,10 +537,10 @@ version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", 
"libgit2-sys 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.17 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -453,7 +549,7 @@ name = "git2-curl" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "curl 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "git2 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -464,6 +560,22 @@ name = "glob" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "globset" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)", + "fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "goto_def" +version = "0.1.0" + [[package]] name = "graphviz" version = "0.0.0" @@ -473,7 +585,7 @@ name = "hamcrest" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "num 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "num 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -487,7 +599,7 @@ dependencies = [ "pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "quick-error 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -496,9 +608,29 @@ name = "hex" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "highlight" +version = "0.1.0" + +[[package]] +name = "home" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "scopeguard 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "userenv-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "hover" +version = "0.1.0" + [[package]] name = "idna" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", @@ -506,11 +638,38 @@ dependencies = [ "unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] 
+name = "ignore" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", + "globset 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "infer_bin" +version = "0.1.0" + +[[package]] +name = "infer_custom_bin" +version = "0.1.0" + +[[package]] +name = "infer_lib" +version = "0.1.0" + [[package]] name = "installer" version = "0.0.0" dependencies = [ - "clap 2.25.0 (registry+https://github.com/rust-lang/crates.io-index)", + "clap 2.26.0 (registry+https://github.com/rust-lang/crates.io-index)", "error-chain 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -531,10 +690,22 @@ name = "jobserver" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "jsonrpc-core" +version = "7.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "futures 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "kernel32-sys" version = "0.2.2" @@ -544,6 +715,19 @@ dependencies = [ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "languageserver-types" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "lazy_static" version = "0.2.8" @@ -558,7 +742,7 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.26" +version = "0.2.29" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -569,10 +753,10 @@ dependencies = [ "cmake 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)", "curl-sys 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.16 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.17 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -582,9 +766,9 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cmake 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.16 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.17 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -594,7 +778,7 @@ version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -610,12 +794,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "lzma-sys" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -628,16 +812,16 @@ name = "mdbook" version = "0.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "clap 2.25.0 (registry+https://github.com/rust-lang/crates.io-index)", + "clap 2.26.0 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "handlebars 0.26.2 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "open 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "pulldown-cmark 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "toml 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -645,7 +829,7 @@ name = "memchr" version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -653,7 +837,7 @@ name = "memchr" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -662,7 +846,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -671,87 +855,91 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "net2 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "net2 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "multiple_bins" +version = "0.1.0" + [[package]] name = "net2" -version = "0.2.29" +version = "0.2.31" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "num" -version = "0.1.39" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "num-bigint 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", - "num-complex 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)", - "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", - "num-iter 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)", - "num-rational 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "num-bigint 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", + "num-complex 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", + "num-integer 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", + "num-iter 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", + "num-rational 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "num-bigint" -version = "0.1.39" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "num-integer 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "num-complex" -version = "0.1.38" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "num-traits 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "num-integer" -version = "0.1.34" +version = "0.1.35" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "num-traits 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "num-iter" -version = "0.1.33" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "num-integer 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "num-rational" -version = "0.1.38" +version = "0.1.39" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "num-bigint 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", - "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", + "num-bigint 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", + "num-integer 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "num-traits" -version = "0.1.39" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -759,9 +947,13 @@ name = "num_cpus" version = "1.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "omit_init_build" +version = "0.1.0" + [[package]] name = "open" version = "1.2.0" @@ -769,14 +961,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "openssl" -version = "0.9.15" +version = "0.9.17" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.17 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -786,12 +978,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "openssl-sys" -version = "0.9.15" +version = "0.9.17" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.51 
(registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -874,11 +1067,30 @@ name = "quick-error" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "quote" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "quote" version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "racer" +version = "2.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "clap 2.26.0 (registry+https://github.com/rust-lang/crates.io-index)", + "env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)", + "syntex_syntax 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "typed-arena 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "rand" version = "0.0.0" @@ -891,9 +1103,17 @@ name = "rand" version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "reformat" +version = "0.1.0" + +[[package]] +name = "reformat_with_range" +version = "0.1.0" + [[package]] name = "regex" version = "0.1.80" @@ -936,28 +1156,85 @@ version = "0.1.0" name = "remote-test-server" version = "0.1.0" +[[package]] +name = "rename" +version = "0.1.0" + +[[package]] +name = "rls" +version = "0.121.0" +dependencies = [ + "cargo 0.22.0", + "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "jsonrpc-core 7.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "languageserver-types 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "racer 2.0.10 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-analysis 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-data 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-rustc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-vfs 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", + "rustfmt-nightly 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rls-analysis" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "derive-new 0.3.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-data 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "rls-data" -version = "0.7.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "rls-rustc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "rls-span" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rls-vfs" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "racer 2.0.10 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "rustbook" version = "0.1.0" dependencies = [ - "clap 2.25.0 (registry+https://github.com/rust-lang/crates.io-index)", + "clap 2.26.0 (registry+https://github.com/rust-lang/crates.io-index)", "mdbook 0.0.22 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -984,7 +1261,7 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -993,7 +1270,6 @@ version = "0.0.0" dependencies = [ "rustc_back 0.0.0", "rustc_driver 0.0.0", - "rustdoc 0.0.0", ] [[package]] @@ -1011,6 +1287,13 @@ dependencies = [ "syntax_pos 0.0.0", ] +[[package]] +name = "rustc_apfloat" +version = "0.0.0" +dependencies = [ + "rustc_bitflags 0.0.0", +] + [[package]] name = "rustc_asan" version = "0.0.0" @@ -1042,7 +1325,6 @@ dependencies = [ "graphviz 0.0.0", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", - "rustc_data_structures 0.0.0", "rustc_errors 0.0.0", "rustc_mir 0.0.0", "syntax 0.0.0", @@ -1068,6 +1350,7 @@ dependencies = [ name = "rustc_const_math" version = "0.0.0" dependencies = [ + "rustc_apfloat 0.0.0", "serialize 0.0.0", "syntax 0.0.0", ] @@ -1084,10 +1367,12 @@ dependencies = [ name = "rustc_driver" version = "0.0.0" dependencies = [ + "ar 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "arena 0.0.0", "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "graphviz 0.0.0", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", "rustc_allocator 0.0.0", "rustc_back 0.0.0", @@ -1105,6 +1390,7 @@ dependencies = [ "rustc_resolve 0.0.0", "rustc_save_analysis 0.0.0", "rustc_trans 0.0.0", + "rustc_trans_utils 0.0.0", "rustc_typeck 0.0.0", "serialize 0.0.0", "syntax 0.0.0", @@ -1175,7 +1461,6 @@ dependencies = [ 
"proc_macro 0.0.0", "rustc 0.0.0", "rustc_back 0.0.0", - "rustc_const_math 0.0.0", "rustc_data_structures 0.0.0", "rustc_errors 0.0.0", "serialize 0.0.0", @@ -1195,6 +1480,7 @@ dependencies = [ "rustc_const_eval 0.0.0", "rustc_const_math 0.0.0", "rustc_data_structures 0.0.0", + "rustc_errors 0.0.0", "syntax 0.0.0", "syntax_pos 0.0.0", ] @@ -1265,10 +1551,11 @@ name = "rustc_save_analysis" version = "0.0.0" dependencies = [ "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", - "rls-data 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rls-data 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc_data_structures 0.0.0", "rustc_typeck 0.0.0", "syntax 0.0.0", "syntax_pos 0.0.0", @@ -1278,14 +1565,14 @@ dependencies = [ name = "rustc_trans" version = "0.0.0" dependencies = [ - "crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", "jobserver 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.6.2 (registry+https://github.com/rust-lang/crates.io-index)", "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "rustc 0.0.0", - "rustc-demangle 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_allocator 0.0.0", "rustc_back 0.0.0", "rustc_bitflags 0.0.0", @@ -1295,15 +1582,26 @@ dependencies = [ "rustc_incremental 0.0.0", "rustc_llvm 0.0.0", "rustc_platform_intrinsics 0.0.0", + "rustc_trans_utils 0.0.0", "serialize 0.0.0", "syntax 0.0.0", "syntax_pos 0.0.0", ] +[[package]] +name = "rustc_trans_utils" +version = "0.0.0" +dependencies = [ + "rustc 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + [[package]] name = "rustc_tsan" version = "0.0.0" dependencies = [ + "alloc 0.0.0", "alloc_system 0.0.0", "build_helper 0.1.0", "cmake 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1331,25 +1629,40 @@ dependencies = [ name = "rustdoc" version = "0.0.0" dependencies = [ - "arena 0.0.0", "build_helper 0.1.0", "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "pulldown-cmark 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc 0.0.0", - "rustc_back 0.0.0", - "rustc_data_structures 0.0.0", - "rustc_driver 0.0.0", - "rustc_errors 0.0.0", - "rustc_lint 0.0.0", - "rustc_metadata 0.0.0", - "rustc_resolve 0.0.0", - "rustc_trans 0.0.0", - "rustc_typeck 0.0.0", - "serialize 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", +] + +[[package]] +name = "rustdoc-tool" +version = "0.0.0" +dependencies = [ + "rustdoc 0.0.0", +] + +[[package]] +name = "rustfmt-nightly" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "strings 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-segmentation 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1366,13 +1679,18 @@ name = "scoped-tls" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "scopeguard" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "semver" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1382,12 +1700,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "serde" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "serde_derive" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1409,7 +1727,7 @@ name = "serde_ignored" version = "0.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1419,8 +1737,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "num-traits 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1434,12 +1752,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "socket2" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1481,11 +1799,28 @@ 
dependencies = [ "core 0.0.0", ] +[[package]] +name = "strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "strsim" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "syn" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "syn" version = "0.11.11" @@ -1521,7 +1856,6 @@ name = "syntax_ext" version = "0.0.0" dependencies = [ "fmt_macros 0.0.0", - "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "proc_macro 0.0.0", "rustc_errors 0.0.0", "syntax 0.0.0", @@ -1536,13 +1870,49 @@ dependencies = [ "serialize 0.0.0", ] +[[package]] +name = "syntex_errors" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)", + "term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "syntex_pos" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "syntex_syntax" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", + "syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)", + "syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)", + "term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "tar" version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "xattr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1558,13 +1928,22 @@ dependencies = [ name = "term" version = "0.0.0" +[[package]] +name = "term" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "term_size" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 
(registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1586,7 +1965,7 @@ dependencies = [ [[package]] name = "textwrap" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1599,7 +1978,7 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1625,7 +2004,7 @@ version = "0.1.0" [[package]] name = "toml" -version = "0.1.30" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1633,12 +2012,17 @@ dependencies = [ [[package]] name = "toml" -version = "0.4.2" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "typed-arena" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "unicode-bidi" version = "0.3.4" @@ -1654,7 +2038,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "unicode-segmentation" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1662,6 +2046,11 @@ name = "unicode-width" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "unicode-xid" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "unicode-xid" version = "0.0.4" @@ -1687,11 +2076,29 @@ name = "url" version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "idna 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "url_serde" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)", + "url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "userenv-sys" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "utf8-ranges" version = "0.1.3" @@ -1760,7 +2167,7 @@ name = "xattr" version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.29 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1768,7 +2175,7 @@ name = "xz2" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "lzma-sys 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "lzma-sys 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1781,67 +2188,81 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66" "checksum aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "500909c4f87a9e52355b26626d890833e9e1d53ac566db76c36faa984b889699" "checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6" +"checksum ar 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b24e4eef8e3fa7e2ca75b157e6039cdf8d9d3a68213ddc19d0fd9d576b9717c9" "checksum atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d912da0db7fa85514874458ca3651fe2cddace8d0b0505571dbdcd41ab490159" "checksum backtrace 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "72f9b4182546f4b04ebc4ab7f84948953a118bd6021a1b6a6c909e3e94f6be76" -"checksum backtrace-sys 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3a0d842ea781ce92be2bf78a9b38883948542749640b8378b3b2f03d1fd9f1ff" +"checksum backtrace-sys 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "afccc5772ba333abccdf60d55200fa3406f8c59dcf54d5f7998c9107d3799c7c" +"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d" "checksum bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1370e9fc2a6ae53aea8b7a5110edbd08836ed87c88736dfabccade1c2b44bff4" "checksum bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5" "checksum bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f382711e76b9de6c744cc00d0497baba02fb00a787f088c879f01d09468e32" "checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de" -"checksum clap 2.25.0 (registry+https://github.com/rust-lang/crates.io-index)" = "867a885995b4184be051b70a592d4d70e32d7a188db6e8dff626af286a962771" +"checksum clap 2.26.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2267a8fdd4dce6956ba6649e130f62fb279026e5e84b92aa939ac8f85ce3f9f0" "checksum cmake 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "b8ebbb35d3dc9cd09497168f33de1acb79b265d350ab0ac34133b98f8509af1f" +"checksum core-foundation 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5909502e547762013619f4c4e01cc7393c20fe2d52d7fa471c1210adb2320dc7" +"checksum core-foundation-sys 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bc9fb3d6cb663e6fd7cf1c63f9b144ee2b1e4a78595a0451dd34bff85b9a3387" "checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97" -"checksum curl 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6689276ab61f97c660669a5ecc117c36875dfc1ba301c986b16c653415bdf9d7" +"checksum curl 0.4.8 
(registry+https://github.com/rust-lang/crates.io-index)" = "7034c534a1d7d22f7971d6088aa9d281d219ef724026c3428092500f41ae9c2c" "checksum curl-sys 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d5481162dc4f424d088581db2f979fa7d4c238fe9794595de61d8d7522e277de" "checksum dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97590ba53bcb8ac28279161ca943a924d1fd4a8fb3fa63302591647c4fc5b850" +"checksum derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "41be6ca3b99e0c0483fb2389685448f650459c3ecbe4e18d7705d8010ec4ab8e" "checksum diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0a515461b6c8c08419850ced27bc29e86166dcdcde8fbe76f8b1f0589bb49472" "checksum docopt 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3b5b93718f8b3e5544fcc914c43de828ca6c6ace23e0332c6080a2977b49787a" "checksum dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80c8b71fd71146990a9742fc06dcbbde19161a267e0ad4e572c35162f4578c90" +"checksum enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be4551092f4d519593039259a9ed8daedf0da12e5109c5280338073eaeb81180" +"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f" "checksum env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b" "checksum error-chain 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d9435d864e017c3c6afeac1654189b06cdb491cf2ff73dbf0d73b0f292f42ff8" "checksum error-chain 0.11.0-rc.2 (registry+https://github.com/rust-lang/crates.io-index)" = "38d3a55d9a7a456748f2a3912c0941a5d9a68006eb15b3c3c9836b8420dc102d" "checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922" "checksum flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)" = "36df0166e856739905cd3d7e0b210fe818592211a008862599845e012d8d304c" +"checksum fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6cc484842f1e2884faf56f529f960cc12ad8c71ce96cc7abba0a067c98fee344" "checksum foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3e4056b9bd47f8ac5ba12be771f77a0dae796d1bbaaf5fd0b9c2d38b69b8a29d" "checksum fs2 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9ab76cfd2aaa59b7bf6688ad9ba15bbae64bff97f04ea02144cfd3443e5c2866" +"checksum futures 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "4b63a4792d4f8f686defe3b39b92127fea6344de5d38202b2ee5a11bbbf29d6a" "checksum gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)" = "120d07f202dcc3f72859422563522b66fe6463a4c513df062874daad05f85f0a" "checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685" "checksum git2 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)" = "aa01936ac96555c083c0e8553f672616274408d9d3fc5b8696603fbf63ff43ee" "checksum git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "68676bc784bf0bef83278898929bf64a251e87c0340723d0b93fa096c9c5bf8e" "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb" +"checksum globset 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"feeb1b6840809ef5efcf7a4a990bc4e1b7ee3df8cf9e2379a75aeb2ba42ac9c3" "checksum hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bf088f042a467089e9baa4972f57f9247e42a0cc549ba264c7a04fbb8ecb89d4" "checksum handlebars 0.26.2 (registry+https://github.com/rust-lang/crates.io-index)" = "fbba80e74e9591a5f6a4ffff6b7f9d645759a896e431cfbdc853e9184370294a" "checksum hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d6a22814455d41612f41161581c2883c0c6a1c41852729b17d5ed88f01e153aa" -"checksum idna 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2233d4940b1f19f0418c158509cd7396b8d70a5db5705ce410914dc8fa603b37" +"checksum home 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9f25ae61099d8f3fee8b483df0bd4ecccf4b2731897aad40d50eca1b641fe6db" +"checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d" +"checksum ignore 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b3fcaf2365eb14b28ec7603c98c06cc531f19de9eb283d89a3dff8417c8c99f5" "checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c" "checksum jobserver 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "443ae8bc0af6c106e6e8b77e04684faecc1a5ce94e058f4c2b0a037b0ea1b133" +"checksum jsonrpc-core 7.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "903e5eee845f3d83c1436d12848d97b1247cf850ff06a8e1db2f1ce3543af2cf" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" +"checksum languageserver-types 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d52e477b23bf52cd3ca0f9fc6c5d14be954eec97e3b9cdfbd962d911bd533caf" "checksum lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3b37545ab726dd833ec6420aaba8231c5b320814b9029ad585555d2a03e94fbf" -"checksum libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)" = "30885bcb161cf67054244d10d4a7f4835ffd58773bc72e07d35fecf472295503" +"checksum libc 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)" = "8a014d9226c2cc402676fbe9ea2e15dd5222cd1dd57f576b5b283178c944a264" "checksum libgit2-sys 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "df18a822100352d9863b302faf6f8f25c0e77f0e60feb40e5dbe1238b7f13b1d" "checksum libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0db4ec23611747ef772db1c4d650f8bd762f07b461727ec998f953c614024b75" "checksum libz-sys 1.0.16 (registry+https://github.com/rust-lang/crates.io-index)" = "3fdd64ef8ee652185674455c1d450b83cbc8ad895625d543b5324d923f82e4d8" "checksum log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "880f77541efa6e5cc74e76910c9884d9859683118839d6a1dc3b11e63512565b" -"checksum lzma-sys 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "013fa6506eb7d26040c46dab9ecb7ccb4e2896b5bf24a9d65932501ea9f67af8" +"checksum lzma-sys 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "66b2e318eb97ab84f05725471f90c52a09c964053a5899a13fd0165acc26d00b" "checksum matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "100aabe6b8ff4e4a7e32c1c13523379802df0772b82466207ac25b013f193376" "checksum mdbook 0.0.22 (registry+https://github.com/rust-lang/crates.io-index)" = "22911d86cde6f80fa9f0fb2a68bbbde85d97af4fe0ce267141c83a4187d28700" 
"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20" "checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4" "checksum miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "28eaee17666671fa872e567547e8428e83308ebe5808cdf6a0e28397dbe2c726" "checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919" -"checksum net2 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)" = "bc01404e7568680f1259aa5729539f221cb1e6d047a0d9053cab4be8a73b5d67" -"checksum num 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "2c3a3dc9f30bf824141521b30c908a859ab190b76e20435fcd89f35eb6583887" -"checksum num-bigint 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "6361748d02e5291c72a422dc8ed4d8464a80cb1e618971f6fffe6d52d97e3286" -"checksum num-complex 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)" = "412dfc143c56579aa6a22c574e38ddbf724522f1280ae2b257498cccff3fb6af" -"checksum num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)" = "ef1a4bf6f9174aa5783a9b4cc892cacd11aebad6c69ad027a0b65c6ca5f8aa37" -"checksum num-iter 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)" = "f7d1891bd7b936f12349b7d1403761c8a0b85a18b148e9da4429d5d102c1a41e" -"checksum num-rational 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)" = "33c881e104a26e1accc09449374c095ff2312c8e0c27fab7bbefe16eac7c776d" -"checksum num-traits 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "1708c0628602a98b52fad936cf3edb9a107af06e52e49fdf0707e884456a6af6" +"checksum net2 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)" = "3a80f842784ef6c9a958b68b7516bc7e35883c614004dd94959a4dca1b716c09" +"checksum num 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "a311b77ebdc5dd4cf6449d81e4135d9f0e3b153839ac90e648a8ef538f923525" +"checksum num-bigint 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "8fd0f8dbb4c0960998958a796281d88c16fbe68d87b1baa6f31e2979e81fd0bd" +"checksum num-complex 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "503e668405c5492d67cf662a81e05be40efe2e6bcf10f7794a07bd9865e704e6" +"checksum num-integer 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "d1452e8b06e448a07f0e6ebb0bb1d92b8890eea63288c0b627331d53514d0fba" +"checksum num-iter 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)" = "7485fcc84f85b4ecd0ea527b14189281cf27d60e583ae65ebc9c088b13dffe01" +"checksum num-rational 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "288629c76fac4b33556f4b7ab57ba21ae202da65ba8b77466e6d598e31990790" +"checksum num-traits 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "99843c856d68d8b4313b03a17e33c4bb42ae8f6610ea81b28abe076ac721b9b0" "checksum num_cpus 1.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "aec53c34f2d0247c5ca5d32cca1478762f301740468ee9ee6dcb7a0dd7a0c584" "checksum open 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3478ed1686bd1300c8a981a940abc92b06fac9cbef747f4c668d4e032ff7b842" -"checksum openssl 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)" = "f776f1d8af832fd2c637ee182c801e8f7ea8895718a2be9914cca001f6e2c40a" +"checksum openssl 0.9.17 
(registry+https://github.com/rust-lang/crates.io-index)" = "085aaedcc89a2fac1eb2bc19cd66f29d4ea99fec60f82a5f3a88a6be7dbd90b5" "checksum openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d98df0270d404ccd3c050a41d579c52d1db15375168bb3471e04ec0f5f378daf" -"checksum openssl-sys 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)" = "ad95f8160d1c150c4f44d4c4959732e048ac046c37f597fe362f8bf57561ffb4" +"checksum openssl-sys 0.9.17 (registry+https://github.com/rust-lang/crates.io-index)" = "7e3a9845a4c9fdb321931868aae5549e96bb7b979bf9af7de03603d74691b5f3" "checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37" "checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356" "checksum pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0a6dda33d67c26f0aac90d324ab2eb7239c819fc7b2552fe9faa4fe88441edc8" @@ -1849,48 +2270,65 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "abcd5d1a07d360e29727f757a9decb3ce8bc6e0efa8969cfaad669a8317a2478" "checksum pulldown-cmark 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9ab1e588ef8efd702c7ed9d2bd774db5e6f4d878bb5a1a9f371828fbdff6973" "checksum quick-error 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c36987d4978eb1be2e422b1e0423a557923a5c3e7e6f31d5699e9aafaefa469" +"checksum quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4c5cf478fe1006dbcc72567121d23dbdae5f1632386068c5c86ff4f645628504" "checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" +"checksum racer 2.0.10 (registry+https://github.com/rust-lang/crates.io-index)" = "f120c7510ef7aff254aeb06067fb6fac573ec96a1660e194787cf9dced412bf0" "checksum rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "022e0636ec2519ddae48154b028864bdce4eaf7d35226ab8e65c611be97b189d" "checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f" "checksum regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1731164734096285ec2a5ec7fea5248ae2f5485b3feeb0115af4fda2183b2d1b" "checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957" "checksum regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db" -"checksum rls-data 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e502ac679bc35e023e982506c32d0278ef89e29af1e4ad21cb70c44b525b87a9" +"checksum rls-analysis 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d2cb40c0371765897ae428b5706bb17135705ad4f6d1b8b6afbaabcf8c9b5cff" +"checksum rls-data 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "11d339f1888e33e74d8032de0f83c40b2bdaaaf04a8cfc03b32186c3481fb534" +"checksum rls-rustc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5fa757c9d547d460427ceff01875f9cac5f5acd8fc6543946e9b0335ba29d537" "checksum rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"5d7c7046dc6a92f2ae02ed302746db4382e75131b9ce20ce967259f6b5867a6a" -"checksum rustc-demangle 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "3058a43ada2c2d0b92b3ae38007a2d0fa5e9db971be260e0171408a4ff471c95" +"checksum rls-vfs 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "ffd34691a510938bb67fe0444fb363103c73ffb31c121d1e16bc92d8945ea8ff" +"checksum rustc-demangle 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "aee45432acc62f7b9a108cc054142dac51f979e69e71ddce7d6fc7adf29e817e" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" +"checksum rustfmt-nightly 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6eea0d0590ae793fc4d281df56e01dc7531575c8ed9a72fadf5fdc7305a0d32f" "checksum same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7" "checksum scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f417c22df063e9450888a7561788e9bd46d3bb3c1466435b4eccb903807f147d" +"checksum scopeguard 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "59a076157c1e2dc561d8de585151ee6965d910dd4dcb5dabb7ae3e83981a6c57" "checksum semver 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3fdd61b85a0fa777f7fb7c454b9189b2941b110d1385ce84d7f76efdf1606a85" "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" -"checksum serde 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)" = "433d7d9f8530d5a939ad5e0e72a6243d2e42a24804f70bf592c679363dcacb2f" -"checksum serde_derive 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)" = "7b707cf0d4cab852084f573058def08879bb467fda89d99052485e7d00edd624" +"checksum serde 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)" = "f7726f29ddf9731b17ff113c461e362c381d9d69433f79de4f3dd572488823e9" +"checksum serde_derive 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)" = "cf823e706be268e73e7747b147aa31c8f633ab4ba31f115efb57e5047c3a76dd" "checksum serde_derive_internals 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)" = "37aee4e0da52d801acfbc0cc219eb1eda7142112339726e427926a6f6ee65d3a" "checksum serde_ignored 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c10e798e4405d7dcec3658989e35ee6706f730a9ed7c1184d5ebd84317e82f46" "checksum serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "48b04779552e92037212c3615370f6bd57a40ebba7f20e554ff9f55e41a69a7b" "checksum shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "dd5cc96481d54583947bfe88bf30c23d53f883c6cd0145368b69989d97b84ef8" -"checksum socket2 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "12cdbddbaa27bf94cc194b8e37f5811db6fe83cea96cf99cf1f8e92b65a41371" +"checksum socket2 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4daf80fcf54186fac4fe049e0b39d36a5cfde69a11a06413e61e77f553cccf9a" "checksum stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15132e0e364248108c5e2c02e3ab539be8d6f5d52a01ca9bbf27ed657316f02b" +"checksum strings 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "da75d8bf2c4d210d63dd09581a041b036001f9f6e03d9b151dbff810fb7ba26a" "checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" 
= "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694" "checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad" +"checksum syn 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6ae6fb0dcc9bd85f89a1a4adc0df2fd90c90c98849d61433983dd7a9df6363f7" "checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6" +"checksum syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9e52bffe6202cfb67587784cf23e0ec5bf26d331eef4922a16d5c42e12aa1e9b" +"checksum syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "955ef4b16af4c468e4680d1497f873ff288f557d338180649e18f915af5e15ac" +"checksum syntex_syntax 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "76a302e717e348aa372ff577791c3832395650073b8d8432f8b3cb170b34afde" "checksum tar 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "281285b717926caa919ad905ef89c63d75805c7d89437fb873100925a53f2b1b" "checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6" +"checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1" "checksum term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2b6b55df3198cc93372e85dd2ed817f0e38ce8cc0f22eb32391bfad9c4bf209" "checksum termcolor 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9a5193a56b8d82014662c4b933dea6bec851daf018a2b01722e007daaf5f9dca" -"checksum textwrap 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f86300c3e7416ee233abd7cda890c492007a3980f941f79185c753a701257167" +"checksum textwrap 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f728584ea33b0ad19318e20557cb0a39097751dbb07171419673502f848c7af6" "checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03" "checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5" "checksum thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1697c4b57aeeb7a536b647165a2825faddffb1d3bad386d507709bd51a90bb14" -"checksum toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "0590d72182e50e879c4da3b11c6488dae18fccb1ae0c7a3eda18e16795844796" -"checksum toml 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b0601da6c97135c8d330c7a13a013ca6cd4143221b01de2f8d4edc50a9e551c7" +"checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4" +"checksum toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a7540f4ffc193e0d3c94121edb19b055670d369f77d5804db11ae053a45b6e7e" +"checksum typed-arena 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5934776c3ac1bea4a9d56620d6bf2d483b20d394e49581db40f187e1118ff667" "checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" "checksum unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = 
"51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f" -"checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3" +"checksum unicode-segmentation 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a8083c594e02b8ae1654ae26f0ade5158b119bd88ad0e8227a5d8fcd72407946" "checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f" +"checksum unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "36dff09cafb4ec7c8cf0023eb0b686cb6ce65499116a12201c9e11840ca01beb" "checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc" "checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" "checksum url 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb819346883532a271eb626deb43c4a1bb4c4dd47c519bd78137c3e72a4fe27" +"checksum url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "74e7d099f1ee52f823d4bdd60c93c3602043c728f5db3b97bdb548467f7bddea" +"checksum userenv-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "71d28ea36bbd9192d75bd9fa9b39f96ddb986eaee824adae5d53b6e51919b2f3" "checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f" "checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122" "checksum vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9e0a7d8bed3178a8fb112199d466eeca9ed09a14ba8ad67718179b4fd5487d0b" diff --git a/src/Cargo.toml b/src/Cargo.toml index dd775280f4..8754d5b2b6 100644 --- a/src/Cargo.toml +++ b/src/Cargo.toml @@ -16,6 +16,27 @@ members = [ "tools/remote-test-server", "tools/rust-installer", "tools/cargo", + "tools/rustdoc", + "tools/rls", + # FIXME(https://github.com/rust-lang/cargo/issues/4089): move these to exclude + "tools/rls/test_data/borrow_error", + "tools/rls/test_data/completion", + "tools/rls/test_data/find_all_refs", + "tools/rls/test_data/find_all_refs_no_cfg_test", + "tools/rls/test_data/goto_def", + "tools/rls/test_data/highlight", + "tools/rls/test_data/hover", + "tools/rls/test_data/rename", + "tools/rls/test_data/reformat", + "tools/rls/test_data/bin_lib_no_cfg_test", + "tools/rls/test_data/multiple_bins", + "tools/rls/test_data/bin_lib", + "tools/rls/test_data/reformat_with_range", + "tools/rls/test_data/find_impls", + "tools/rls/test_data/infer_bin", + "tools/rls/test_data/infer_custom_bin", + "tools/rls/test_data/infer_lib", + "tools/rls/test_data/omit_init_build", ] # Curiously, compiletest will segfault if compiled with opt-level=3 on 64-bit diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml index 8842dce025..daa2a3d0a0 100644 --- a/src/bootstrap/Cargo.toml +++ b/src/bootstrap/Cargo.toml @@ -33,8 +33,11 @@ build_helper = { path = "../build_helper" } cmake = "0.1.23" filetime = "0.1" num_cpus = "1.0" -toml = "0.1" getopts = "0.2" -rustc-serialize = "0.3" gcc = "0.3.50" libc = "0.2" +serde = "1.0.8" +serde_derive = "1.0.8" +serde_json = "1.0.2" +toml = "0.4" +lazy_static = "0.2" diff --git a/src/bootstrap/README.md b/src/bootstrap/README.md index 
1ce99eb893..2e844ceb17 100644 --- a/src/bootstrap/README.md +++ b/src/bootstrap/README.md @@ -73,16 +73,19 @@ The script accepts commands, flags, and arguments to determine what to do: ## Configuring rustbuild -There are currently two primary methods for configuring the rustbuild build -system. First, the `./configure` options serialized in `config.mk` will be -parsed and read. That is, if any `./configure` options are passed, they'll be -handled naturally. +There are currently two methods for configuring the rustbuild build system. -Next, rustbuild offers a TOML-based configuration system with a `config.toml` +First, rustbuild offers a TOML-based configuration system with a `config.toml` file in the same location as `config.mk`. An example of this configuration can -be found at `src/bootstrap/config.toml.example`, and the configuration file -can also be passed as `--config path/to/config.toml` if the build system is -being invoked manually (via the python script). +be found at `config.toml.example`, and the configuration file can also be passed +as `--config path/to/config.toml` if the build system is being invoked manually +(via the python script). + +Next, the `./configure` options serialized in `config.mk` will be +parsed and read. That is, if any `./configure` options are passed, they'll be +handled naturally. `./configure` should almost never be used for local +installations, and is primarily useful for CI. Prefer to customize behavior +using `config.toml`. Finally, rustbuild makes use of the [gcc-rs crate] which has [its own method][env-vars] of configuring C compilers and C flags via environment @@ -310,17 +313,18 @@ After that, each module in rustbuild should have enough documentation to keep you up and running. Some general areas that you may be interested in modifying are: -* Adding a new build tool? Take a look at `bootstrap/step.rs` for examples of +* Adding a new build tool? Take a look at `bootstrap/tool.rs` for examples of other tools. * Adding a new compiler crate? Look no further! Adding crates can be done by adding a new directory with `Cargo.toml` followed by configuring all `Cargo.toml` files accordingly. -* Adding a new dependency from crates.io? We're still working on that, so hold - off on that for now. -* Adding a new configuration option? Take a look at `bootstrap/config.rs` or - perhaps `bootstrap/flags.rs` and then modify the build elsewhere to read that - option. +* Adding a new dependency from crates.io? This should just work inside the + compiler artifacts stage (everything other than libtest and libstd). +* Adding a new configuration option? You'll want to modify `bootstrap/flags.rs` + for command line flags and then `bootstrap/config.rs` to copy the flags to the + `Config` struct. * Adding a sanity check? Take a look at `bootstrap/sanity.rs`. -If you have any questions feel free to reach out on `#rust-internals` on IRC or -open an issue in the bug tracker! +If you have any questions feel free to reach out on `#rust-infra` on IRC or ask on +internals.rust-lang.org. When you encounter bugs, please file issues on the +rust-lang/rust issue tracker. 
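The README hunk above settles the precedence between `config.toml` and the `./configure`-generated `config.mk`. For readers who want the concrete rule, here is a small illustrative Python sketch, not part of the patch, of how a setting such as the rustc path gets resolved with that precedence. It mirrors the `get_toml`/`get_mk`/`program_config` helpers added to bootstrap.py later in this patch, but with a simplified regex, and the paths used in the calls are made-up examples.

    import os
    import re

    def get_toml(config_toml, key):
        # Simplified stand-in for RustBuild.get_toml()/get_string(): grab the
        # double-quoted value of `key = "..."`, or None if the key is absent.
        for line in config_toml.splitlines():
            match = re.match(r'^{}\s*=\s*"([^"]*)"'.format(key), line)
            if match is not None:
                return match.group(1)
        return None

    def get_mk(config_mk, key):
        # Simplified stand-in for RustBuild.get_mk(): read `key := value`.
        for line in config_mk.splitlines():
            if line.startswith(key + ' '):
                return line[line.find(':=') + 2:].strip() or None
        return None

    def resolve_rustc(config_toml, config_mk):
        # config.toml wins; the ./configure output in config.mk is the fallback.
        rustc = get_toml(config_toml, 'rustc')
        if rustc:
            return rustc
        local_root = get_mk(config_mk, 'CFG_LOCAL_RUST_ROOT')
        if local_root:
            return os.path.join(local_root, 'bin', 'rustc')
        return None

    print(resolve_rustc('rustc = "/opt/rust/bin/rustc"', ''))       # /opt/rust/bin/rustc
    print(resolve_rustc('', 'CFG_LOCAL_RUST_ROOT := /opt/rust'))    # /opt/rust/bin/rustc

The same "toml first, then config.mk" fallback is what the bootstrap.py changes below implement for cargo and rustc.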
diff --git a/src/bootstrap/bin/main.rs b/src/bootstrap/bin/main.rs index 5ef18b8984..d02bc7972a 100644 --- a/src/bootstrap/bin/main.rs +++ b/src/bootstrap/bin/main.rs @@ -21,11 +21,10 @@ extern crate bootstrap; use std::env; -use bootstrap::{Flags, Config, Build}; +use bootstrap::{Config, Build}; fn main() { let args = env::args().skip(1).collect::>(); - let flags = Flags::parse(&args); - let config = Config::parse(&flags.build, flags.config.clone()); - Build::new(flags, config).build(); + let config = Config::parse(&args); + Build::new(config).build(); } diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs index 134406b1ac..0baca9e58f 100644 --- a/src/bootstrap/bin/rustc.rs +++ b/src/bootstrap/bin/rustc.rs @@ -185,7 +185,10 @@ fn main() { // Emit save-analysis info. if env::var("RUSTC_SAVE_ANALYSIS") == Ok("api".to_string()) { - cmd.arg("-Zsave-analysis-api"); + cmd.arg("-Zsave-analysis"); + cmd.env("RUST_SAVE_ANALYSIS_CONFIG", + "{\"output_file\": null,\"full_docs\": false,\"pub_only\": true,\ + \"distro_crate\": true,\"signatures\": false,\"borrow_data\": false}"); } // Dealing with rpath here is a little special, so let's go into some @@ -234,9 +237,13 @@ fn main() { } } - if target.contains("pc-windows-msvc") { - cmd.arg("-Z").arg("unstable-options"); - cmd.arg("-C").arg("target-feature=+crt-static"); + if let Ok(s) = env::var("RUSTC_CRT_STATIC") { + if s == "true" { + cmd.arg("-C").arg("target-feature=+crt-static"); + } + if s == "false" { + cmd.arg("-C").arg("target-feature=-crt-static"); + } } // Force all crates compiled by this compiler to (a) be unstable and (b) diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index ebc4c2fdf7..9369a55ccb 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -37,12 +37,12 @@ def get(url, path, verbose=False): if os.path.exists(path): if verify(path, sha_path, False): if verbose: - print("using already-download file " + path) + print("using already-download file", path) return else: if verbose: - print("ignoring already-download file " + - path + " due to failed verification") + print("ignoring already-download file", + path, "due to failed verification") os.unlink(path) download(temp_path, url, True, verbose) if not verify(temp_path, sha_path, verbose): @@ -59,12 +59,12 @@ def delete_if_present(path, verbose): """Remove the given file if present""" if os.path.isfile(path): if verbose: - print("removing " + path) + print("removing", path) os.unlink(path) def download(path, url, probably_big, verbose): - for x in range(0, 4): + for _ in range(0, 4): try: _download(path, url, probably_big, verbose, True) return @@ -96,7 +96,7 @@ def _download(path, url, probably_big, verbose, exception): def verify(path, sha_path, verbose): """Check if the sha256 sum of the given path is valid""" if verbose: - print("verifying " + path) + print("verifying", path) with open(path, "rb") as source: found = hashlib.sha256(source.read()).hexdigest() with open(sha_path, "r") as sha256sum: @@ -111,29 +111,30 @@ def verify(path, sha_path, verbose): def unpack(tarball, dst, verbose=False, match=None): """Unpack the given tarball file""" - print("extracting " + tarball) + print("extracting", tarball) fname = os.path.basename(tarball).replace(".tar.gz", "") with contextlib.closing(tarfile.open(tarball)) as tar: - for p in tar.getnames(): - if "/" not in p: + for member in tar.getnames(): + if "/" not in member: continue - name = p.replace(fname + "/", "", 1) + name = member.replace(fname + "/", "", 1) if match is not 
None and not name.startswith(match): continue name = name[len(match) + 1:] - fp = os.path.join(dst, name) + dst_path = os.path.join(dst, name) if verbose: - print(" extracting " + p) - tar.extract(p, dst) - tp = os.path.join(dst, p) - if os.path.isdir(tp) and os.path.exists(fp): + print(" extracting", member) + tar.extract(member, dst) + src_path = os.path.join(dst, member) + if os.path.isdir(src_path) and os.path.exists(dst_path): continue - shutil.move(tp, fp) + shutil.move(src_path, dst_path) shutil.rmtree(os.path.join(dst, fname)) def run(args, verbose=False, exception=False, **kwargs): + """Run a child program in a new process""" if verbose: print("running: " + ' '.join(args)) sys.stdout.flush() @@ -149,97 +150,118 @@ def run(args, verbose=False, exception=False, **kwargs): def stage0_data(rust_root): + """Build a dictionary from stage0.txt""" nightlies = os.path.join(rust_root, "src/stage0.txt") - data = {} with open(nightlies, 'r') as nightlies: - for line in nightlies: - line = line.rstrip() # Strip newline character, '\n' - if line.startswith("#") or line == '': - continue - a, b = line.split(": ", 1) - data[a] = b - return data + lines = [line.rstrip() for line in nightlies + if not line.startswith("#")] + return dict([line.split(": ", 1) for line in lines if line]) def format_build_time(duration): + """Return a nicer format for build time + + >>> format_build_time('300') + '0:05:00' + """ return str(datetime.timedelta(seconds=int(duration))) class RustBuild(object): + """Provide all the methods required to build Rust""" + def __init__(self): + self.cargo_channel = '' + self.date = '' + self._download_url = 'https://static.rust-lang.org' + self.rustc_channel = '' + self.build = '' + self.build_dir = os.path.join(os.getcwd(), "build") + self.clean = False + self.config_mk = '' + self.config_toml = '' + self.printed = False + self.rust_root = os.path.abspath(os.path.join(__file__, '../../..')) + self.use_locked_deps = '' + self.use_vendored_sources = '' + self.verbose = False def download_stage0(self): - cache_dst = os.path.join(self.build_dir, "cache") - rustc_cache = os.path.join(cache_dst, self.stage0_date()) - if not os.path.exists(rustc_cache): - os.makedirs(rustc_cache) + """Fetch the build system for Rust, written in Rust + + This method will build a cache directory, then it will fetch the + tarball which has the stage0 compiler used to then bootstrap the Rust + compiler itself. - rustc_channel = self.stage0_rustc_channel() - cargo_channel = self.stage0_cargo_channel() + Each downloaded tarball is extracted, after that, the script + will move all the content to the right place. 
+ """ + rustc_channel = self.rustc_channel + cargo_channel = self.cargo_channel if self.rustc().startswith(self.bin_root()) and \ - (not os.path.exists(self.rustc()) or self.rustc_out_of_date()): - self.print_what_it_means_to_bootstrap() + (not os.path.exists(self.rustc()) or + self.program_out_of_date(self.rustc_stamp())): + self.print_what_bootstrap_means() if os.path.exists(self.bin_root()): shutil.rmtree(self.bin_root()) filename = "rust-std-{}-{}.tar.gz".format( rustc_channel, self.build) - url = self._download_url + "/dist/" + self.stage0_date() - tarball = os.path.join(rustc_cache, filename) - if not os.path.exists(tarball): - get("{}/{}".format(url, filename), - tarball, verbose=self.verbose) - unpack(tarball, self.bin_root(), - match="rust-std-" + self.build, - verbose=self.verbose) + pattern = "rust-std-{}".format(self.build) + self._download_stage0_helper(filename, pattern) filename = "rustc-{}-{}.tar.gz".format(rustc_channel, self.build) - url = self._download_url + "/dist/" + self.stage0_date() - tarball = os.path.join(rustc_cache, filename) - if not os.path.exists(tarball): - get("{}/{}".format(url, filename), - tarball, verbose=self.verbose) - unpack(tarball, self.bin_root(), - match="rustc", verbose=self.verbose) - self.fix_executable(self.bin_root() + "/bin/rustc") - self.fix_executable(self.bin_root() + "/bin/rustdoc") - with open(self.rustc_stamp(), 'w') as f: - f.write(self.stage0_date()) + self._download_stage0_helper(filename, "rustc") + self.fix_executable("{}/bin/rustc".format(self.bin_root())) + self.fix_executable("{}/bin/rustdoc".format(self.bin_root())) + with open(self.rustc_stamp(), 'w') as rust_stamp: + rust_stamp.write(self.date) if "pc-windows-gnu" in self.build: filename = "rust-mingw-{}-{}.tar.gz".format( rustc_channel, self.build) - url = self._download_url + "/dist/" + self.stage0_date() - tarball = os.path.join(rustc_cache, filename) - if not os.path.exists(tarball): - get("{}/{}".format(url, filename), - tarball, verbose=self.verbose) - unpack(tarball, self.bin_root(), - match="rust-mingw", verbose=self.verbose) + self._download_stage0_helper(filename, "rust-mingw") if self.cargo().startswith(self.bin_root()) and \ - (not os.path.exists(self.cargo()) or self.cargo_out_of_date()): - self.print_what_it_means_to_bootstrap() + (not os.path.exists(self.cargo()) or + self.program_out_of_date(self.cargo_stamp())): + self.print_what_bootstrap_means() filename = "cargo-{}-{}.tar.gz".format(cargo_channel, self.build) - url = self._download_url + "/dist/" + self.stage0_date() - tarball = os.path.join(rustc_cache, filename) - if not os.path.exists(tarball): - get("{}/{}".format(url, filename), - tarball, verbose=self.verbose) - unpack(tarball, self.bin_root(), - match="cargo", verbose=self.verbose) - self.fix_executable(self.bin_root() + "/bin/cargo") - with open(self.cargo_stamp(), 'w') as f: - f.write(self.stage0_date()) - - def fix_executable(self, fname): - # If we're on NixOS we need to change the path to the dynamic loader + self._download_stage0_helper(filename, "cargo") + self.fix_executable("{}/bin/cargo".format(self.bin_root())) + with open(self.cargo_stamp(), 'w') as cargo_stamp: + cargo_stamp.write(self.date) + + def _download_stage0_helper(self, filename, pattern): + cache_dst = os.path.join(self.build_dir, "cache") + rustc_cache = os.path.join(cache_dst, self.date) + if not os.path.exists(rustc_cache): + os.makedirs(rustc_cache) + + url = "{}/dist/{}".format(self._download_url, self.date) + tarball = os.path.join(rustc_cache, filename) + if not 
os.path.exists(tarball): + get("{}/{}".format(url, filename), tarball, verbose=self.verbose) + unpack(tarball, self.bin_root(), match=pattern, verbose=self.verbose) + @staticmethod + def fix_executable(fname): + """Modifies the interpreter section of 'fname' to fix the dynamic linker + + This method is only required on NixOS and uses the PatchELF utility to + change the dynamic linker of ELF executables. + + Please see https://nixos.org/patchelf.html for more information + """ default_encoding = sys.getdefaultencoding() try: ostype = subprocess.check_output( ['uname', '-s']).strip().decode(default_encoding) - except (subprocess.CalledProcessError, WindowsError): + except subprocess.CalledProcessError: return + except OSError as reason: + if getattr(reason, 'winerror', None) is not None: + return + raise reason if ostype != "Linux": return @@ -257,8 +279,8 @@ class RustBuild(object): interpreter = subprocess.check_output( ["patchelf", "--print-interpreter", fname]) interpreter = interpreter.strip().decode(default_encoding) - except subprocess.CalledProcessError as e: - print("warning: failed to call patchelf: %s" % e) + except subprocess.CalledProcessError as reason: + print("warning: failed to call patchelf:", reason) return loader = interpreter.split("/")[-1] @@ -267,8 +289,8 @@ class RustBuild(object): ldd_output = subprocess.check_output( ['ldd', '/run/current-system/sw/bin/sh']) ldd_output = ldd_output.strip().decode(default_encoding) - except subprocess.CalledProcessError as e: - print("warning: unable to call ldd: %s" % e) + except subprocess.CalledProcessError as reason: + print("warning: unable to call ldd:", reason) return for line in ldd_output.splitlines(): @@ -285,45 +307,66 @@ class RustBuild(object): try: subprocess.check_output( ["patchelf", "--set-interpreter", correct_interpreter, fname]) - except subprocess.CalledProcessError as e: - print("warning: failed to call patchelf: %s" % e) + except subprocess.CalledProcessError as reason: + print("warning: failed to call patchelf:", reason) return - def stage0_date(self): - return self._date - - def stage0_rustc_channel(self): - return self._rustc_channel - - def stage0_cargo_channel(self): - return self._cargo_channel - def rustc_stamp(self): - """Return the path for .rustc-stamp""" + """Return the path for .rustc-stamp + + >>> rb = RustBuild() + >>> rb.build_dir = "build" + >>> rb.rustc_stamp() == os.path.join("build", "stage0", ".rustc-stamp") + True + """ return os.path.join(self.bin_root(), '.rustc-stamp') def cargo_stamp(self): - """Return the path for .cargo-stamp""" - return os.path.join(self.bin_root(), '.cargo-stamp') + """Return the path for .cargo-stamp - def rustc_out_of_date(self): - """Check if rustc is out of date""" - if not os.path.exists(self.rustc_stamp()) or self.clean: - return True - with open(self.rustc_stamp(), 'r') as f: - return self.stage0_date() != f.read() + >>> rb = RustBuild() + >>> rb.build_dir = "build" + >>> rb.cargo_stamp() == os.path.join("build", "stage0", ".cargo-stamp") + True + """ + return os.path.join(self.bin_root(), '.cargo-stamp') - def cargo_out_of_date(self): - """Check if cargo is out of date""" - if not os.path.exists(self.cargo_stamp()) or self.clean: + def program_out_of_date(self, stamp_path): + """Check if the given program stamp is out of date""" + if not os.path.exists(stamp_path) or self.clean: return True - with open(self.cargo_stamp(), 'r') as f: - return self.stage0_date() != f.read() + with open(stamp_path, 'r') as stamp: + return self.date != stamp.read() def 
bin_root(self): + """Return the binary root directory + + >>> rb = RustBuild() + >>> rb.build_dir = "build" + >>> rb.bin_root() == os.path.join("build", "stage0") + True + + When the 'build' property is given should be a nested directory: + + >>> rb.build = "devel" + >>> rb.bin_root() == os.path.join("build", "devel", "stage0") + True + """ return os.path.join(self.build_dir, self.build, "stage0") def get_toml(self, key): + """Returns the value of the given key in config.toml, otherwise returns None + + >>> rb = RustBuild() + >>> rb.config_toml = 'key1 = "value1"\\nkey2 = "value2"' + >>> rb.get_toml("key2") + 'value2' + + If the key does not exists, the result is None: + + >>> rb.get_toml("key3") == None + True + """ for line in self.config_toml.splitlines(): match = re.match(r'^{}\s*=(.*)$'.format(key), line) if match is not None: @@ -332,6 +375,18 @@ class RustBuild(object): return None def get_mk(self, key): + """Returns the value of the given key in config.mk, otherwise returns None + + >>> rb = RustBuild() + >>> rb.config_mk = 'key := value\\n' + >>> rb.get_mk('key') + 'value' + + If the key does not exists, the result is None: + + >>> rb.get_mk('does_not_exists') == None + True + """ for line in iter(self.config_mk.splitlines()): if line.startswith(key + ' '): var = line[line.find(':=') + 2:].strip() @@ -340,36 +395,64 @@ class RustBuild(object): return None def cargo(self): - config = self.get_toml('cargo') - if config: - return config - config = self.get_mk('CFG_LOCAL_RUST_ROOT') - if config: - return config + '/bin/cargo' + self.exe_suffix() - return os.path.join(self.bin_root(), "bin/cargo" + self.exe_suffix()) + """Return config path for cargo""" + return self.program_config('cargo') def rustc(self): - config = self.get_toml('rustc') + """Return config path for rustc""" + return self.program_config('rustc') + + def program_config(self, program): + """Return config path for the given program + + >>> rb = RustBuild() + >>> rb.config_toml = 'rustc = "rustc"\\n' + >>> rb.config_mk = 'CFG_LOCAL_RUST_ROOT := /tmp/rust\\n' + >>> rb.program_config('rustc') + 'rustc' + >>> cargo_path = rb.program_config('cargo') + >>> cargo_path.rstrip(".exe") == os.path.join("/tmp/rust", + ... "bin", "cargo") + True + >>> rb.config_toml = '' + >>> rb.config_mk = '' + >>> cargo_path = rb.program_config('cargo') + >>> cargo_path.rstrip(".exe") == os.path.join(rb.bin_root(), + ... 
"bin", "cargo") + True + """ + config = self.get_toml(program) if config: return config config = self.get_mk('CFG_LOCAL_RUST_ROOT') if config: - return config + '/bin/rustc' + self.exe_suffix() - return os.path.join(self.bin_root(), "bin/rustc" + self.exe_suffix()) - - def get_string(self, line): + return os.path.join(config, "bin", "{}{}".format( + program, self.exe_suffix())) + return os.path.join(self.bin_root(), "bin", "{}{}".format( + program, self.exe_suffix())) + + @staticmethod + def get_string(line): + """Return the value between double quotes + + >>> RustBuild.get_string(' "devel" ') + 'devel' + """ start = line.find('"') if start == -1: return None end = start + 1 + line[start + 1:].find('"') return line[start + 1:end] - def exe_suffix(self): + @staticmethod + def exe_suffix(): + """Return a suffix for executables""" if sys.platform == 'win32': return '.exe' return '' - def print_what_it_means_to_bootstrap(self): + def print_what_bootstrap_means(self): + """Prints more information about the build system""" if hasattr(self, 'printed'): return self.printed = True @@ -386,10 +469,19 @@ class RustBuild(object): print(' src/bootstrap/README.md before the download finishes') def bootstrap_binary(self): - return os.path.join(self.build_dir, "bootstrap/debug/bootstrap") + """Return the path of the boostrap binary + + >>> rb = RustBuild() + >>> rb.build_dir = "build" + >>> rb.bootstrap_binary() == os.path.join("build", "bootstrap", + ... "debug", "bootstrap") + True + """ + return os.path.join(self.build_dir, "bootstrap", "debug", "bootstrap") def build_bootstrap(self): - self.print_what_it_means_to_bootstrap() + """Build bootstrap""" + self.print_what_bootstrap_means() build_dir = os.path.join(self.build_dir, "bootstrap") if self.clean and os.path.exists(build_dir): shutil.rmtree(build_dir) @@ -409,7 +501,8 @@ class RustBuild(object): env["PATH"] = os.path.join(self.bin_root(), "bin") + \ os.pathsep + env["PATH"] if not os.path.isfile(self.cargo()): - raise Exception("no cargo executable found at `%s`" % self.cargo()) + raise Exception("no cargo executable found at `{}`".format( + self.cargo())) args = [self.cargo(), "build", "--manifest-path", os.path.join(self.rust_root, "src/bootstrap/Cargo.toml")] if self.verbose: @@ -423,6 +516,7 @@ class RustBuild(object): run(args, env=env, verbose=self.verbose) def build_triple(self): + """Build triple as in LLVM""" default_encoding = sys.getdefaultencoding() config = self.get_toml('build') if config: @@ -445,23 +539,26 @@ class RustBuild(object): # The goal here is to come up with the same triple as LLVM would, # at least for the subset of platforms we're willing to target. 
- if ostype == 'Linux': + ostype_mapper = { + 'Bitrig': 'unknown-bitrig', + 'Darwin': 'apple-darwin', + 'DragonFly': 'unknown-dragonfly', + 'FreeBSD': 'unknown-freebsd', + 'Haiku': 'unknown-haiku', + 'NetBSD': 'unknown-netbsd', + 'OpenBSD': 'unknown-openbsd' + } + + # Consider the direct transformation first and then the special cases + if ostype in ostype_mapper: + ostype = ostype_mapper[ostype] + elif ostype == 'Linux': os_from_sp = subprocess.check_output( ['uname', '-o']).strip().decode(default_encoding) if os_from_sp == 'Android': ostype = 'linux-android' else: ostype = 'unknown-linux-gnu' - elif ostype == 'FreeBSD': - ostype = 'unknown-freebsd' - elif ostype == 'DragonFly': - ostype = 'unknown-dragonfly' - elif ostype == 'Bitrig': - ostype = 'unknown-bitrig' - elif ostype == 'OpenBSD': - ostype = 'unknown-openbsd' - elif ostype == 'NetBSD': - ostype = 'unknown-netbsd' elif ostype == 'SunOS': ostype = 'sun-solaris' # On Solaris, uname -m will return a machine classification instead @@ -477,10 +574,6 @@ class RustBuild(object): if self.verbose: raise Exception(err) sys.exit(err) - elif ostype == 'Darwin': - ostype = 'apple-darwin' - elif ostype == 'Haiku': - ostype = 'unknown-haiku' elif ostype.startswith('MINGW'): # msys' `uname` does not print gcc configuration, but prints msys # configuration. so we cannot believe `uname -m`: @@ -499,13 +592,36 @@ class RustBuild(object): cputype = 'x86_64' ostype = 'pc-windows-gnu' else: - err = "unknown OS type: " + ostype + err = "unknown OS type: {}".format(ostype) if self.verbose: raise ValueError(err) sys.exit(err) - if cputype in {'i386', 'i486', 'i686', 'i786', 'x86'}: - cputype = 'i686' + cputype_mapper = { + 'BePC': 'i686', + 'aarch64': 'aarch64', + 'amd64': 'x86_64', + 'arm64': 'aarch64', + 'i386': 'i686', + 'i486': 'i686', + 'i686': 'i686', + 'i786': 'i686', + 'powerpc': 'powerpc', + 'powerpc64': 'powerpc64', + 'powerpc64le': 'powerpc64le', + 'ppc': 'powerpc', + 'ppc64': 'powerpc64', + 'ppc64le': 'powerpc64le', + 's390x': 's390x', + 'x64': 'x86_64', + 'x86': 'i686', + 'x86-64': 'x86_64', + 'x86_64': 'x86_64' + } + + # Consider the direct transformation first and then the special cases + if cputype in cputype_mapper: + cputype = cputype_mapper[cputype] elif cputype in {'xscale', 'arm'}: cputype = 'arm' if ostype == 'linux-android': @@ -522,40 +638,26 @@ class RustBuild(object): ostype = 'linux-androideabi' else: ostype += 'eabihf' - elif cputype in {'aarch64', 'arm64'}: - cputype = 'aarch64' elif cputype == 'mips': if sys.byteorder == 'big': cputype = 'mips' elif sys.byteorder == 'little': cputype = 'mipsel' else: - raise ValueError('unknown byteorder: ' + sys.byteorder) + raise ValueError("unknown byteorder: {}".format(sys.byteorder)) elif cputype == 'mips64': if sys.byteorder == 'big': cputype = 'mips64' elif sys.byteorder == 'little': cputype = 'mips64el' else: - raise ValueError('unknown byteorder: ' + sys.byteorder) + raise ValueError('unknown byteorder: {}'.format(sys.byteorder)) # only the n64 ABI is supported, indicate it ostype += 'abi64' - elif cputype in {'powerpc', 'ppc'}: - cputype = 'powerpc' - elif cputype in {'powerpc64', 'ppc64'}: - cputype = 'powerpc64' - elif cputype in {'powerpc64le', 'ppc64le'}: - cputype = 'powerpc64le' elif cputype == 'sparcv9': pass - elif cputype in {'amd64', 'x86_64', 'x86-64', 'x64'}: - cputype = 'x86_64' - elif cputype == 's390x': - cputype = 's390x' - elif cputype == 'BePC': - cputype = 'i686' else: - err = "unknown cpu type: " + cputype + err = "unknown cpu type: {}".format(cputype) if 
self.verbose: raise ValueError(err) sys.exit(err) @@ -563,6 +665,7 @@ class RustBuild(object): return "{}-{}".format(cputype, ostype) def update_submodules(self): + """Update submodules""" if (not os.path.exists(os.path.join(self.rust_root, ".git"))) or \ self.get_toml('submodules') == "false" or \ self.get_mk('CFG_DISABLE_MANAGE_SUBMODULES') == "1": @@ -592,10 +695,16 @@ class RustBuild(object): "clean", "-qdfx"], cwd=self.rust_root, verbose=self.verbose) + def set_dev_environment(self): + """Set download URL for development environment""" + self._download_url = 'https://dev-static.rust-lang.org' + def bootstrap(): + """Configure, fetch, build and run the initial bootstrap""" parser = argparse.ArgumentParser(description='Build rust') parser.add_argument('--config') + parser.add_argument('--build') parser.add_argument('--clean', action='store_true') parser.add_argument('-v', '--verbose', action='store_true') @@ -603,107 +712,103 @@ def bootstrap(): args, _ = parser.parse_known_args(args) # Configure initial bootstrap - rb = RustBuild() - rb.config_toml = '' - rb.config_mk = '' - rb.rust_root = os.path.abspath(os.path.join(__file__, '../../..')) - rb.build_dir = os.path.join(os.getcwd(), "build") - rb.verbose = args.verbose - rb.clean = args.clean + build = RustBuild() + build.verbose = args.verbose + build.clean = args.clean try: with open(args.config or 'config.toml') as config: - rb.config_toml = config.read() + build.config_toml = config.read() except: pass try: - rb.config_mk = open('config.mk').read() + build.config_mk = open('config.mk').read() except: pass - if '\nverbose = 2' in rb.config_toml: - rb.verbose = 2 - elif '\nverbose = 1' in rb.config_toml: - rb.verbose = 1 + if '\nverbose = 2' in build.config_toml: + build.verbose = 2 + elif '\nverbose = 1' in build.config_toml: + build.verbose = 1 - rb.use_vendored_sources = '\nvendor = true' in rb.config_toml or \ - 'CFG_ENABLE_VENDOR' in rb.config_mk + build.use_vendored_sources = '\nvendor = true' in build.config_toml or \ + 'CFG_ENABLE_VENDOR' in build.config_mk - rb.use_locked_deps = '\nlocked-deps = true' in rb.config_toml or \ - 'CFG_ENABLE_LOCKED_DEPS' in rb.config_mk + build.use_locked_deps = '\nlocked-deps = true' in build.config_toml or \ + 'CFG_ENABLE_LOCKED_DEPS' in build.config_mk - if 'SUDO_USER' in os.environ and not rb.use_vendored_sources: + if 'SUDO_USER' in os.environ and not build.use_vendored_sources: if os.environ.get('USER') != os.environ['SUDO_USER']: - rb.use_vendored_sources = True + build.use_vendored_sources = True print('info: looks like you are running this command under `sudo`') print(' and so in order to preserve your $HOME this will now') print(' use vendored sources by default. 
Note that if this') print(' does not work you should run a normal build first') print(' before running a command like `sudo make install`') - if rb.use_vendored_sources: + if build.use_vendored_sources: if not os.path.exists('.cargo'): os.makedirs('.cargo') - with open('.cargo/config', 'w') as f: - f.write(""" + with open('.cargo/config', 'w') as cargo_config: + cargo_config.write(""" [source.crates-io] replace-with = 'vendored-sources' registry = 'https://example.com' [source.vendored-sources] directory = '{}/src/vendor' - """.format(rb.rust_root)) + """.format(build.rust_root)) else: if os.path.exists('.cargo'): shutil.rmtree('.cargo') - data = stage0_data(rb.rust_root) - rb._date = data['date'] - rb._rustc_channel = data['rustc'] - rb._cargo_channel = data['cargo'] + data = stage0_data(build.rust_root) + build.date = data['date'] + build.rustc_channel = data['rustc'] + build.cargo_channel = data['cargo'] + if 'dev' in data: - rb._download_url = 'https://dev-static.rust-lang.org' - else: - rb._download_url = 'https://static.rust-lang.org' + build.set_dev_environment() - rb.update_submodules() + build.update_submodules() # Fetch/build the bootstrap - rb.build = rb.build_triple() - rb.download_stage0() + build.build = args.build or build.build_triple() + build.download_stage0() sys.stdout.flush() - rb.build_bootstrap() + build.build_bootstrap() sys.stdout.flush() # Run the bootstrap - args = [rb.bootstrap_binary()] + args = [build.bootstrap_binary()] args.extend(sys.argv[1:]) env = os.environ.copy() - env["BUILD"] = rb.build - env["SRC"] = rb.rust_root + env["BUILD"] = build.build + env["SRC"] = build.rust_root env["BOOTSTRAP_PARENT_ID"] = str(os.getpid()) env["BOOTSTRAP_PYTHON"] = sys.executable - run(args, env=env, verbose=rb.verbose) + run(args, env=env, verbose=build.verbose) def main(): + """Entry point for the bootstrap process""" start_time = time() help_triggered = ( '-h' in sys.argv) or ('--help' in sys.argv) or (len(sys.argv) == 1) try: bootstrap() if not help_triggered: - print("Build completed successfully in %s" % - format_build_time(time() - start_time)) - except (SystemExit, KeyboardInterrupt) as e: - if hasattr(e, 'code') and isinstance(e.code, int): - exit_code = e.code + print("Build completed successfully in {}".format( + format_build_time(time() - start_time))) + except (SystemExit, KeyboardInterrupt) as error: + if hasattr(error, 'code') and isinstance(error.code, int): + exit_code = error.code else: exit_code = 1 - print(e) + print(error) if not help_triggered: - print("Build completed unsuccessfully in %s" % - format_build_time(time() - start_time)) + print("Build completed unsuccessfully in {}".format( + format_build_time(time() - start_time))) sys.exit(exit_code) diff --git a/src/bootstrap/bootstrap_test.py b/src/bootstrap/bootstrap_test.py new file mode 100644 index 0000000000..a65a3a4042 --- /dev/null +++ b/src/bootstrap/bootstrap_test.py @@ -0,0 +1,114 @@ +# Copyright 2015-2016 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 or the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. 
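The new test module that starts here is worth calling out: besides the unittest cases defined below, its `__main__` block also collects the doctest examples (`>>> ...`) that this patch adds to the docstrings in bootstrap.py. A condensed view of that wiring, not part of the patch, assuming it is run from `src/bootstrap` so that `import bootstrap` resolves:

    import doctest
    import unittest

    import bootstrap  # src/bootstrap/bootstrap.py, the module under test

    SUITE = unittest.TestSuite()
    # Docstring examples in bootstrap.py are executed as tests as well.
    SUITE.addTest(doctest.DocTestSuite(bootstrap))
    unittest.TextTestRunner(verbosity=2).run(SUITE)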
+ +"""Bootstrap tests""" + +import os +import doctest +import unittest +import tempfile +import hashlib + +from shutil import rmtree + +import bootstrap + + +class Stage0DataTestCase(unittest.TestCase): + """Test Case for stage0_data""" + def setUp(self): + self.rust_root = tempfile.mkdtemp() + os.mkdir(os.path.join(self.rust_root, "src")) + with open(os.path.join(self.rust_root, "src", + "stage0.txt"), "w") as stage0: + stage0.write("#ignore\n\ndate: 2017-06-15\nrustc: beta\ncargo: beta") + + def tearDown(self): + rmtree(self.rust_root) + + def test_stage0_data(self): + """Extract data from stage0.txt""" + expected = {"date": "2017-06-15", "rustc": "beta", "cargo": "beta"} + data = bootstrap.stage0_data(self.rust_root) + self.assertDictEqual(data, expected) + + +class VerifyTestCase(unittest.TestCase): + """Test Case for verify""" + def setUp(self): + self.container = tempfile.mkdtemp() + self.src = os.path.join(self.container, "src.txt") + self.sums = os.path.join(self.container, "sums") + self.bad_src = os.path.join(self.container, "bad.txt") + content = "Hello world" + + with open(self.src, "w") as src: + src.write(content) + with open(self.sums, "w") as sums: + sums.write(hashlib.sha256(content.encode("utf-8")).hexdigest()) + with open(self.bad_src, "w") as bad: + bad.write("Hello!") + + def tearDown(self): + rmtree(self.container) + + def test_valid_file(self): + """Check if the sha256 sum of the given file is valid""" + self.assertTrue(bootstrap.verify(self.src, self.sums, False)) + + def test_invalid_file(self): + """Should verify that the file is invalid""" + self.assertFalse(bootstrap.verify(self.bad_src, self.sums, False)) + + +class ProgramOutOfDate(unittest.TestCase): + """Test if a program is out of date""" + def setUp(self): + self.container = tempfile.mkdtemp() + os.mkdir(os.path.join(self.container, "stage0")) + self.build = bootstrap.RustBuild() + self.build.date = "2017-06-15" + self.build.build_dir = self.container + self.rustc_stamp_path = os.path.join(self.container, "stage0", + ".rustc-stamp") + + def tearDown(self): + rmtree(self.container) + + def test_stamp_path_does_not_exists(self): + """Return True when the stamp file does not exists""" + if os.path.exists(self.rustc_stamp_path): + os.unlink(self.rustc_stamp_path) + self.assertTrue(self.build.program_out_of_date(self.rustc_stamp_path)) + + def test_dates_are_different(self): + """Return True when the dates are different""" + with open(self.rustc_stamp_path, "w") as rustc_stamp: + rustc_stamp.write("2017-06-14") + self.assertTrue(self.build.program_out_of_date(self.rustc_stamp_path)) + + def test_same_dates(self): + """Return False both dates match""" + with open(self.rustc_stamp_path, "w") as rustc_stamp: + rustc_stamp.write("2017-06-15") + self.assertFalse(self.build.program_out_of_date(self.rustc_stamp_path)) + + +if __name__ == '__main__': + SUITE = unittest.TestSuite() + TEST_LOADER = unittest.TestLoader() + SUITE.addTest(doctest.DocTestSuite(bootstrap)) + SUITE.addTests([ + TEST_LOADER.loadTestsFromTestCase(Stage0DataTestCase), + TEST_LOADER.loadTestsFromTestCase(VerifyTestCase), + TEST_LOADER.loadTestsFromTestCase(ProgramOutOfDate)]) + + RUNNER = unittest.TextTestRunner(verbosity=2) + RUNNER.run(SUITE) diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs new file mode 100644 index 0000000000..298f6a004a --- /dev/null +++ b/src/bootstrap/builder.rs @@ -0,0 +1,630 @@ +// Copyright 2017 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::fmt::Debug; +use std::hash::Hash; +use std::cell::RefCell; +use std::path::{Path, PathBuf}; +use std::process::Command; +use std::fs; +use std::ops::Deref; +use std::any::Any; +use std::collections::BTreeSet; + +use compile; +use install; +use dist; +use util::{exe, libdir, add_lib_path}; +use {Build, Mode}; +use cache::{INTERNER, Interned, Cache}; +use check; +use flags::Subcommand; +use doc; +use tool; +use native; + +pub use Compiler; + +pub struct Builder<'a> { + pub build: &'a Build, + pub top_stage: u32, + pub kind: Kind, + cache: Cache, + stack: RefCell>>, +} + +impl<'a> Deref for Builder<'a> { + type Target = Build; + + fn deref(&self) -> &Self::Target { + self.build + } +} + +pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash { + /// `PathBuf` when directories are created or to return a `Compiler` once + /// it's been assembled. + type Output: Clone; + + const DEFAULT: bool = false; + + /// Run this rule for all hosts without cross compiling. + const ONLY_HOSTS: bool = false; + + /// Run this rule for all targets, but only with the native host. + const ONLY_BUILD_TARGETS: bool = false; + + /// Only run this step with the build triple as host and target. + const ONLY_BUILD: bool = false; + + /// Primary function to execute this rule. Can call `builder.ensure(...)` + /// with other steps to run those. + fn run(self, builder: &Builder) -> Self::Output; + + /// When bootstrap is passed a set of paths, this controls whether this rule + /// will execute. However, it does not get called in a "default" context + /// when we are not passed any paths; in that case, make_run is called + /// directly. + fn should_run(run: ShouldRun) -> ShouldRun; + + /// Build up a "root" rule, either as a default rule or from a path passed + /// to us. + /// + /// When path is `None`, we are executing in a context where no paths were + /// passed. When `./x.py build` is run, for example, this rule could get + /// called if it is in the correct list below with a path of `None`. + fn make_run(_run: RunConfig) { + // It is reasonable to not have an implementation of make_run for rules + // who do not want to get called from the root context. This means that + // they are likely dependencies (e.g., sysroot creation) or similar, and + // as such calling them from ./x.py isn't logical. + unimplemented!() + } +} + +pub struct RunConfig<'a> { + pub builder: &'a Builder<'a>, + pub host: Interned, + pub target: Interned, + pub path: Option<&'a Path>, +} + +struct StepDescription { + default: bool, + only_hosts: bool, + only_build_targets: bool, + only_build: bool, + should_run: fn(ShouldRun) -> ShouldRun, + make_run: fn(RunConfig), +} + +impl StepDescription { + fn from() -> StepDescription { + StepDescription { + default: S::DEFAULT, + only_hosts: S::ONLY_HOSTS, + only_build_targets: S::ONLY_BUILD_TARGETS, + only_build: S::ONLY_BUILD, + should_run: S::should_run, + make_run: S::make_run, + } + } + + fn maybe_run(&self, builder: &Builder, path: Option<&Path>) { + let build = builder.build; + let hosts = if self.only_build_targets || self.only_build { + build.build_triple() + } else { + &build.hosts + }; + + // Determine the targets participating in this rule. 
+ let targets = if self.only_hosts { + if build.config.run_host_only { + &[] + } else if self.only_build { + build.build_triple() + } else { + &build.hosts + } + } else { + &build.targets + }; + + for host in hosts { + for target in targets { + let run = RunConfig { + builder, + path, + host: *host, + target: *target, + }; + (self.make_run)(run); + } + } + } + + fn run(v: &[StepDescription], builder: &Builder, paths: &[PathBuf]) { + let should_runs = v.iter().map(|desc| { + (desc.should_run)(ShouldRun::new(builder)) + }).collect::>(); + if paths.is_empty() { + for (desc, should_run) in v.iter().zip(should_runs) { + if desc.default && should_run.is_really_default { + desc.maybe_run(builder, None); + } + } + } else { + for path in paths { + let mut attempted_run = false; + for (desc, should_run) in v.iter().zip(&should_runs) { + if should_run.run(path) { + attempted_run = true; + desc.maybe_run(builder, Some(path)); + } + } + + if !attempted_run { + eprintln!("Warning: no rules matched {}.", path.display()); + } + } + } + } +} + +#[derive(Clone)] +pub struct ShouldRun<'a> { + pub builder: &'a Builder<'a>, + // use a BTreeSet to maintain sort order + paths: BTreeSet, + + // If this is a default rule, this is an additional constraint placed on + // it's run. Generally something like compiler docs being enabled. + is_really_default: bool, +} + +impl<'a> ShouldRun<'a> { + fn new(builder: &'a Builder) -> ShouldRun<'a> { + ShouldRun { + builder, + paths: BTreeSet::new(), + is_really_default: true, // by default no additional conditions + } + } + + pub fn default_condition(mut self, cond: bool) -> Self { + self.is_really_default = cond; + self + } + + pub fn krate(mut self, name: &str) -> Self { + for (_, krate_path) in self.builder.crates(name) { + self.paths.insert(PathBuf::from(krate_path)); + } + self + } + + pub fn path(mut self, path: &str) -> Self { + self.paths.insert(PathBuf::from(path)); + self + } + + // allows being more explicit about why should_run in Step returns the value passed to it + pub fn never(self) -> ShouldRun<'a> { + self + } + + fn run(&self, path: &Path) -> bool { + self.paths.iter().any(|p| path.ends_with(p)) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub enum Kind { + Build, + Test, + Bench, + Dist, + Doc, + Install, +} + +impl<'a> Builder<'a> { + fn get_step_descriptions(kind: Kind) -> Vec { + macro_rules! 
describe { + ($($rule:ty),+ $(,)*) => {{ + vec![$(StepDescription::from::<$rule>()),+] + }}; + } + match kind { + Kind::Build => describe!(compile::Std, compile::Test, compile::Rustc, + compile::StartupObjects, tool::BuildManifest, tool::Rustbook, tool::ErrorIndex, + tool::UnstableBookGen, tool::Tidy, tool::Linkchecker, tool::CargoTest, + tool::Compiletest, tool::RemoteTestServer, tool::RemoteTestClient, + tool::RustInstaller, tool::Cargo, tool::Rls, tool::Rustdoc, + native::Llvm), + Kind::Test => describe!(check::Tidy, check::Bootstrap, check::DefaultCompiletest, + check::HostCompiletest, check::Crate, check::CrateLibrustc, check::Linkcheck, + check::Cargotest, check::Cargo, check::Rls, check::Docs, check::ErrorIndex, + check::Distcheck), + Kind::Bench => describe!(check::Crate, check::CrateLibrustc), + Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook, + doc::Standalone, doc::Std, doc::Test, doc::Rustc, doc::ErrorIndex, doc::Nomicon, + doc::Reference, doc::Rustdoc, doc::CargoBook), + Kind::Dist => describe!(dist::Docs, dist::Mingw, dist::Rustc, dist::DebuggerScripts, + dist::Std, dist::Analysis, dist::Src, dist::PlainSourceTarball, dist::Cargo, + dist::Rls, dist::Extended, dist::HashSign), + Kind::Install => describe!(install::Docs, install::Std, install::Cargo, install::Rls, + install::Analysis, install::Src, install::Rustc), + } + } + + pub fn get_help(build: &Build, subcommand: &str) -> Option { + let kind = match subcommand { + "build" => Kind::Build, + "doc" => Kind::Doc, + "test" => Kind::Test, + "bench" => Kind::Bench, + "dist" => Kind::Dist, + "install" => Kind::Install, + _ => return None, + }; + + let builder = Builder { + build, + top_stage: build.config.stage.unwrap_or(2), + kind, + cache: Cache::new(), + stack: RefCell::new(Vec::new()), + }; + + let builder = &builder; + let mut should_run = ShouldRun::new(builder); + for desc in Builder::get_step_descriptions(builder.kind) { + should_run = (desc.should_run)(should_run); + } + let mut help = String::from("Available paths:\n"); + for path in should_run.paths { + help.push_str(format!(" ./x.py {} {}\n", subcommand, path.display()).as_str()); + } + Some(help) + } + + pub fn run(build: &Build) { + let (kind, paths) = match build.config.cmd { + Subcommand::Build { ref paths } => (Kind::Build, &paths[..]), + Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]), + Subcommand::Test { ref paths, .. } => (Kind::Test, &paths[..]), + Subcommand::Bench { ref paths, .. } => (Kind::Bench, &paths[..]), + Subcommand::Dist { ref paths } => (Kind::Dist, &paths[..]), + Subcommand::Install { ref paths } => (Kind::Install, &paths[..]), + Subcommand::Clean => panic!(), + }; + + let builder = Builder { + build, + top_stage: build.config.stage.unwrap_or(2), + kind, + cache: Cache::new(), + stack: RefCell::new(Vec::new()), + }; + + StepDescription::run(&Builder::get_step_descriptions(builder.kind), &builder, paths); + } + + pub fn default_doc(&self, paths: Option<&[PathBuf]>) { + let paths = paths.unwrap_or(&[]); + StepDescription::run(&Builder::get_step_descriptions(Kind::Doc), self, paths); + } + + /// Obtain a compiler at a given stage and for a given host. Explicitly does + /// not take `Compiler` since all `Compiler` instances are meant to be + /// obtained through this function, since it ensures that they are valid + /// (i.e., built and assembled). 
+ pub fn compiler(&self, stage: u32, host: Interned) -> Compiler { + self.ensure(compile::Assemble { target_compiler: Compiler { stage, host } }) + } + + pub fn sysroot(&self, compiler: Compiler) -> Interned { + self.ensure(compile::Sysroot { compiler }) + } + + /// Returns the libdir where the standard library and other artifacts are + /// found for a compiler's sysroot. + pub fn sysroot_libdir( + &self, compiler: Compiler, target: Interned + ) -> Interned { + #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + struct Libdir { + compiler: Compiler, + target: Interned, + } + impl Step for Libdir { + type Output = Interned; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.never() + } + + fn run(self, builder: &Builder) -> Interned { + let compiler = self.compiler; + let lib = if compiler.stage >= 2 && builder.build.config.libdir_relative.is_some() { + builder.build.config.libdir_relative.clone().unwrap() + } else { + PathBuf::from("lib") + }; + let sysroot = builder.sysroot(self.compiler).join(lib) + .join("rustlib").join(self.target).join("lib"); + let _ = fs::remove_dir_all(&sysroot); + t!(fs::create_dir_all(&sysroot)); + INTERNER.intern_path(sysroot) + } + } + self.ensure(Libdir { compiler, target }) + } + + /// Returns the compiler's libdir where it stores the dynamic libraries that + /// it itself links against. + /// + /// For example this returns `/lib` on Unix and `/bin` on + /// Windows. + pub fn rustc_libdir(&self, compiler: Compiler) -> PathBuf { + if compiler.is_snapshot(self) { + self.build.rustc_snapshot_libdir() + } else { + self.sysroot(compiler).join(libdir(&compiler.host)) + } + } + + /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic + /// library lookup path. + pub fn add_rustc_lib_path(&self, compiler: Compiler, cmd: &mut Command) { + // Windows doesn't need dylib path munging because the dlls for the + // compiler live next to the compiler and the system will find them + // automatically. + if cfg!(windows) { + return + } + + add_lib_path(vec![self.rustc_libdir(compiler)], cmd); + } + + /// Get a path to the compiler specified. + pub fn rustc(&self, compiler: Compiler) -> PathBuf { + if compiler.is_snapshot(self) { + self.initial_rustc.clone() + } else { + self.sysroot(compiler).join("bin").join(exe("rustc", &compiler.host)) + } + } + + pub fn rustdoc(&self, host: Interned) -> PathBuf { + self.ensure(tool::Rustdoc { host }) + } + + pub fn rustdoc_cmd(&self, host: Interned) -> Command { + let mut cmd = Command::new(&self.out.join("bootstrap/debug/rustdoc")); + let compiler = self.compiler(self.top_stage, host); + cmd + .env("RUSTC_STAGE", compiler.stage.to_string()) + .env("RUSTC_SYSROOT", self.sysroot(compiler)) + .env("RUSTC_LIBDIR", self.sysroot_libdir(compiler, self.build.build)) + .env("CFG_RELEASE_CHANNEL", &self.build.config.channel) + .env("RUSTDOC_REAL", self.rustdoc(host)); + cmd + } + + /// Prepares an invocation of `cargo` to be run. + /// + /// This will create a `Command` that represents a pending execution of + /// Cargo. This cargo will be configured to use `compiler` as the actual + /// rustc compiler, its output will be scoped by `mode`'s output directory, + /// it will pass the `--target` flag for the specified `target`, and will be + /// executing the Cargo command `cmd`. 
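The doc comment above, and the body of `cargo` that follows, rely on an indirection that is easy to miss: cargo is pointed at the bootstrap `rustc` shim, and the real configuration travels through environment variables that the shim in src/bootstrap/bin/rustc.rs (modified earlier in this patch) turns back into compiler flags. A conceptual Python sketch of that round trip, not part of the patch, using RUSTC_CRT_STATIC as the example setting; the rustc path passed in is invented for illustration:

    import os

    def builder_side(real_rustc, crt_static):
        # What Builder::cargo() does, in miniature: point RUSTC at the shim
        # and pass settings to it through the environment.
        env = dict(os.environ)
        env['RUSTC'] = 'bootstrap/debug/rustc'   # the shim built by rustbuild
        env['RUSTC_REAL'] = real_rustc           # the compiler the shim should run
        env['RUSTC_CRT_STATIC'] = 'true' if crt_static else 'false'
        return env

    def shim_side(env, args):
        # What bin/rustc.rs does with one of those settings: translate the
        # environment back into flags for the real compiler.
        cmd = [env['RUSTC_REAL']] + list(args)
        if env.get('RUSTC_CRT_STATIC') == 'true':
            cmd += ['-C', 'target-feature=+crt-static']
        elif env.get('RUSTC_CRT_STATIC') == 'false':
            cmd += ['-C', 'target-feature=-crt-static']
        return cmd

    print(shim_side(builder_side('/usr/local/bin/rustc', True), ['--crate-name', 'foo']))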
+ pub fn cargo(&self, + compiler: Compiler, + mode: Mode, + target: Interned, + cmd: &str) -> Command { + let mut cargo = Command::new(&self.initial_cargo); + let out_dir = self.stage_out(compiler, mode); + cargo.env("CARGO_TARGET_DIR", out_dir) + .arg(cmd) + .arg("-j").arg(self.jobs().to_string()) + .arg("--target").arg(target); + + // FIXME: Temporary fix for https://github.com/rust-lang/cargo/issues/3005 + // Force cargo to output binaries with disambiguating hashes in the name + cargo.env("__CARGO_DEFAULT_LIB_METADATA", &self.config.channel); + + let stage; + if compiler.stage == 0 && self.local_rebuild { + // Assume the local-rebuild rustc already has stage1 features. + stage = 1; + } else { + stage = compiler.stage; + } + + // Customize the compiler we're running. Specify the compiler to cargo + // as our shim and then pass it some various options used to configure + // how the actual compiler itself is called. + // + // These variables are primarily all read by + // src/bootstrap/bin/{rustc.rs,rustdoc.rs} + cargo.env("RUSTBUILD_NATIVE_DIR", self.native_dir(target)) + .env("RUSTC", self.out.join("bootstrap/debug/rustc")) + .env("RUSTC_REAL", self.rustc(compiler)) + .env("RUSTC_STAGE", stage.to_string()) + .env("RUSTC_CODEGEN_UNITS", + self.config.rust_codegen_units.to_string()) + .env("RUSTC_DEBUG_ASSERTIONS", + self.config.rust_debug_assertions.to_string()) + .env("RUSTC_SYSROOT", self.sysroot(compiler)) + .env("RUSTC_LIBDIR", self.rustc_libdir(compiler)) + .env("RUSTC_RPATH", self.config.rust_rpath.to_string()) + .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc")) + .env("RUSTDOC_REAL", if cmd == "doc" || cmd == "test" { + self.rustdoc(compiler.host) + } else { + PathBuf::from("/path/to/nowhere/rustdoc/not/required") + }) + .env("RUSTC_FLAGS", self.rustc_flags(target).join(" ")); + + if mode != Mode::Tool { + // Tools don't get debuginfo right now, e.g. cargo and rls don't + // get compiled with debuginfo. + cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string()) + .env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string()) + .env("RUSTC_FORCE_UNSTABLE", "1"); + + // Currently the compiler depends on crates from crates.io, and + // then other crates can depend on the compiler (e.g. proc-macro + // crates). Let's say, for example that rustc itself depends on the + // bitflags crate. If an external crate then depends on the + // bitflags crate as well, we need to make sure they don't + // conflict, even if they pick the same version of bitflags. We'll + // want to make sure that e.g. a plugin and rustc each get their + // own copy of bitflags. + + // Cargo ensures that this works in general through the -C metadata + // flag. This flag will frob the symbols in the binary to make sure + // they're different, even though the source code is the exact + // same. To solve this problem for the compiler we extend Cargo's + // already-passed -C metadata flag with our own. Our rustc.rs + // wrapper around the actual rustc will detect -C metadata being + // passed and frob it with this extra string we're passing in. + cargo.env("RUSTC_METADATA_SUFFIX", "rustc"); + } + + if let Some(x) = self.crt_static(target) { + cargo.env("RUSTC_CRT_STATIC", x.to_string()); + } + + // Enable usage of unstable features + cargo.env("RUSTC_BOOTSTRAP", "1"); + self.add_rust_test_threads(&mut cargo); + + // Almost all of the crates that we compile as part of the bootstrap may + // have a build script, including the standard library. 
To compile a + // build script, however, it itself needs a standard library! This + // introduces a bit of a pickle when we're compiling the standard + // library itself. + // + // To work around this we actually end up using the snapshot compiler + // (stage0) for compiling build scripts of the standard library itself. + // The stage0 compiler is guaranteed to have a libstd available for use. + // + // For other crates, however, we know that we've already got a standard + // library up and running, so we can use the normal compiler to compile + // build scripts in that situation. + if mode == Mode::Libstd { + cargo.env("RUSTC_SNAPSHOT", &self.initial_rustc) + .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir()); + } else { + cargo.env("RUSTC_SNAPSHOT", self.rustc(compiler)) + .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_libdir(compiler)); + } + + // Ignore incremental modes except for stage0, since we're + // not guaranteeing correctness across builds if the compiler + // is changing under your feet.` + if self.config.incremental && compiler.stage == 0 { + let incr_dir = self.incremental_dir(compiler); + cargo.env("RUSTC_INCREMENTAL", incr_dir); + } + + if let Some(ref on_fail) = self.config.on_fail { + cargo.env("RUSTC_ON_FAIL", on_fail); + } + + cargo.env("RUSTC_VERBOSE", format!("{}", self.verbosity)); + + // Specify some various options for build scripts used throughout + // the build. + // + // FIXME: the guard against msvc shouldn't need to be here + if !target.contains("msvc") { + cargo.env(format!("CC_{}", target), self.cc(target)) + .env(format!("AR_{}", target), self.ar(target).unwrap()) // only msvc is None + .env(format!("CFLAGS_{}", target), self.cflags(target).join(" ")); + + if let Ok(cxx) = self.cxx(target) { + cargo.env(format!("CXX_{}", target), cxx); + } + } + + if mode == Mode::Libstd && self.config.extended && compiler.is_final_stage(self) { + cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string()); + } + + // Environment variables *required* throughout the build + // + // FIXME: should update code to not require this env var + cargo.env("CFG_COMPILER_HOST_TRIPLE", target); + + // Set this for all builds to make sure doc builds also get it. + cargo.env("CFG_RELEASE_CHANNEL", &self.build.config.channel); + + if self.is_verbose() { + cargo.arg("-v"); + } + // FIXME: cargo bench does not accept `--release` + if self.config.rust_optimize && cmd != "bench" { + cargo.arg("--release"); + } + if self.config.locked_deps { + cargo.arg("--locked"); + } + if self.config.vendor || self.is_sudo { + cargo.arg("--frozen"); + } + + self.ci_env.force_coloring_in_ci(&mut cargo); + + cargo + } + + /// Ensure that a given step is built, returning it's output. This will + /// cache the step, so it is safe (and good!) to call this as often as + /// needed to ensure that all dependencies are built. 
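The doc comment above is the heart of the new builder: each step runs at most once, its output is cached, and a step reached again while it is still on the execution stack is a dependency cycle. A rough Python model of that behaviour, not part of the patch; the `Std` step here is a toy stand-in for illustration, not the real compile::Std:

    class Builder(object):
        def __init__(self):
            self.cache = {}
            self.stack = []

        def ensure(self, step):
            if step in self.stack:
                raise RuntimeError('cycle in build detected when adding {!r}'.format(step))
            if step in self.cache:
                return self.cache[step]       # already built, reuse the output
            self.stack.append(step)
            try:
                out = step.run(self)          # may call builder.ensure(...) recursively
            finally:
                self.stack.pop()
            self.cache[step] = out
            return out

    class Std(object):
        """Toy step: 'build' the standard library for one target."""
        def __init__(self, target):
            self.target = target
        def __hash__(self):
            return hash((Std, self.target))
        def __eq__(self, other):
            return isinstance(other, Std) and other.target == self.target
        def __repr__(self):
            return 'Std({!r})'.format(self.target)
        def run(self, builder):
            return 'libstd for {}'.format(self.target)

    builder = Builder()
    print(builder.ensure(Std('x86_64-unknown-linux-gnu')))
    print(builder.ensure(Std('x86_64-unknown-linux-gnu')))  # second call hits the cache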
+ pub fn ensure(&'a self, step: S) -> S::Output { + { + let mut stack = self.stack.borrow_mut(); + for stack_step in stack.iter() { + // should skip + if stack_step.downcast_ref::().map_or(true, |stack_step| *stack_step != step) { + continue; + } + let mut out = String::new(); + out += &format!("\n\nCycle in build detected when adding {:?}\n", step); + for el in stack.iter().rev() { + out += &format!("\t{:?}\n", el); + } + panic!(out); + } + if let Some(out) = self.cache.get(&step) { + self.build.verbose(&format!("{}c {:?}", " ".repeat(stack.len()), step)); + + return out; + } + self.build.verbose(&format!("{}> {:?}", " ".repeat(stack.len()), step)); + stack.push(Box::new(step.clone())); + } + let out = step.clone().run(self); + { + let mut stack = self.stack.borrow_mut(); + let cur_step = stack.pop().expect("step stack empty"); + assert_eq!(cur_step.downcast_ref(), Some(&step)); + } + self.build.verbose(&format!("{}< {:?}", " ".repeat(self.stack.borrow().len()), step)); + self.cache.put(step, out.clone()); + out + } +} diff --git a/src/bootstrap/cache.rs b/src/bootstrap/cache.rs new file mode 100644 index 0000000000..c274931588 --- /dev/null +++ b/src/bootstrap/cache.rs @@ -0,0 +1,267 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::any::{Any, TypeId}; +use std::borrow::Borrow; +use std::cell::RefCell; +use std::collections::HashMap; +use std::convert::AsRef; +use std::ffi::OsStr; +use std::fmt; +use std::hash::{Hash, Hasher}; +use std::marker::PhantomData; +use std::mem; +use std::ops::Deref; +use std::path::{Path, PathBuf}; +use std::sync::Mutex; + +use builder::Step; + +pub struct Interned(usize, PhantomData<*const T>); + +impl Default for Interned { + fn default() -> Self { + INTERNER.intern_string(String::default()) + } +} + +impl Default for Interned { + fn default() -> Self { + INTERNER.intern_path(PathBuf::default()) + } +} + +impl Copy for Interned {} +impl Clone for Interned { + fn clone(&self) -> Interned { + *self + } +} + +impl PartialEq for Interned { + fn eq(&self, other: &Self) -> bool { + self.0 == other.0 + } +} +impl Eq for Interned {} + +impl PartialEq for Interned { + fn eq(&self, other: &str) -> bool { + *self == other + } +} +impl<'a> PartialEq<&'a str> for Interned { + fn eq(&self, other: &&str) -> bool { + **self == **other + } +} +impl<'a, T> PartialEq<&'a Interned> for Interned { + fn eq(&self, other: &&Self) -> bool { + self.0 == other.0 + } +} +impl<'a, T> PartialEq> for &'a Interned { + fn eq(&self, other: &Interned) -> bool { + self.0 == other.0 + } +} + +unsafe impl Send for Interned {} +unsafe impl Sync for Interned {} + +impl fmt::Display for Interned { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let s: &str = &*self; + f.write_str(s) + } +} + +impl fmt::Debug for Interned { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let s: &str = &*self; + f.write_fmt(format_args!("{:?}", s)) + } +} +impl fmt::Debug for Interned { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let s: &Path = &*self; + f.write_fmt(format_args!("{:?}", s)) + } +} + +impl Hash for Interned { + fn hash(&self, state: &mut H) { + let l = INTERNER.strs.lock().unwrap(); + l.get(*self).hash(state) + } +} + +impl Hash for Interned { + fn 
hash(&self, state: &mut H) { + let l = INTERNER.paths.lock().unwrap(); + l.get(*self).hash(state) + } +} + +impl Deref for Interned { + type Target = str; + fn deref(&self) -> &'static str { + let l = INTERNER.strs.lock().unwrap(); + unsafe { mem::transmute::<&str, &'static str>(l.get(*self)) } + } +} + +impl Deref for Interned { + type Target = Path; + fn deref(&self) -> &'static Path { + let l = INTERNER.paths.lock().unwrap(); + unsafe { mem::transmute::<&Path, &'static Path>(l.get(*self)) } + } +} + +impl AsRef for Interned { + fn as_ref(&self) -> &'static Path { + let l = INTERNER.paths.lock().unwrap(); + unsafe { mem::transmute::<&Path, &'static Path>(l.get(*self)) } + } +} + +impl AsRef for Interned { + fn as_ref(&self) -> &'static Path { + let l = INTERNER.strs.lock().unwrap(); + unsafe { mem::transmute::<&Path, &'static Path>(l.get(*self).as_ref()) } + } +} + +impl AsRef for Interned { + fn as_ref(&self) -> &'static OsStr { + let l = INTERNER.paths.lock().unwrap(); + unsafe { mem::transmute::<&OsStr, &'static OsStr>(l.get(*self).as_ref()) } + } +} + +impl AsRef for Interned { + fn as_ref(&self) -> &'static OsStr { + let l = INTERNER.strs.lock().unwrap(); + unsafe { mem::transmute::<&OsStr, &'static OsStr>(l.get(*self).as_ref()) } + } +} + + +struct TyIntern { + items: Vec, + set: HashMap>, +} + +impl TyIntern { + fn new() -> TyIntern { + TyIntern { + items: Vec::new(), + set: HashMap::new(), + } + } + + fn intern_borrow(&mut self, item: &B) -> Interned + where + B: Eq + Hash + ToOwned + ?Sized, + T: Borrow, + { + if let Some(i) = self.set.get(&item) { + return *i; + } + let item = item.to_owned(); + let interned = Interned(self.items.len(), PhantomData::<*const T>); + self.set.insert(item.clone(), interned); + self.items.push(item); + interned + } + + fn intern(&mut self, item: T) -> Interned { + if let Some(i) = self.set.get(&item) { + return *i; + } + let interned = Interned(self.items.len(), PhantomData::<*const T>); + self.set.insert(item.clone(), interned); + self.items.push(item); + interned + } + + fn get(&self, i: Interned) -> &T { + &self.items[i.0] + } +} + +pub struct Interner { + strs: Mutex>, + paths: Mutex>, +} + +impl Interner { + fn new() -> Interner { + Interner { + strs: Mutex::new(TyIntern::new()), + paths: Mutex::new(TyIntern::new()), + } + } + + pub fn intern_str(&self, s: &str) -> Interned { + self.strs.lock().unwrap().intern_borrow(s) + } + pub fn intern_string(&self, s: String) -> Interned { + self.strs.lock().unwrap().intern(s) + } + + pub fn intern_path(&self, s: PathBuf) -> Interned { + self.paths.lock().unwrap().intern(s) + } +} + +lazy_static! { + pub static ref INTERNER: Interner = Interner::new(); +} + +/// This is essentially a HashMap which allows storing any type in its input and +/// any type in its output. It is a write-once cache; values are never evicted, +/// which means that references to the value can safely be returned from the +/// get() method. 
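// Illustrative, standalone sketch of the write-once, type-erased cache idea
// described in the comment above, using only the standard library: each key
// type gets its own inner HashMap stored as Box<dyn Any> and recovered by
// downcasting on the key's TypeId. The names TypedCache/put/get are
// stand-ins, not the bootstrap's actual Cache API.
use std::any::{Any, TypeId};
use std::collections::HashMap;
use std::hash::Hash;

struct TypedCache {
    map: HashMap<TypeId, Box<dyn Any>>,
}

impl TypedCache {
    fn new() -> TypedCache {
        TypedCache { map: HashMap::new() }
    }

    // Store a value for `key`; each concrete key type K owns one inner map,
    // and storing the same key twice is treated as a bug.
    fn put<K: Eq + Hash + 'static, V: Clone + 'static>(&mut self, key: K, value: V) {
        let inner = self.map
            .entry(TypeId::of::<K>())
            .or_insert_with(|| Box::new(HashMap::<K, V>::new()))
            .downcast_mut::<HashMap<K, V>>()
            .expect("TypeId mapped to a map of a different type");
        assert!(!inner.contains_key(&key), "cached the same key twice");
        inner.insert(key, value);
    }

    fn get<K: Eq + Hash + 'static, V: Clone + 'static>(&self, key: &K) -> Option<V> {
        self.map
            .get(&TypeId::of::<K>())
            .and_then(|boxed| boxed.downcast_ref::<HashMap<K, V>>())
            .and_then(|inner| inner.get(key).cloned())
    }
}

fn main() {
    let mut cache = TypedCache::new();
    cache.put("std".to_string(), 1u32);
    assert_eq!(cache.get::<String, u32>(&"std".to_string()), Some(1));
    assert_eq!(cache.get::<String, u32>(&"missing".to_string()), None);
}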
+#[derive(Debug)] +pub struct Cache( + RefCell, // actually a HashMap> + >> +); + +impl Cache { + pub fn new() -> Cache { + Cache(RefCell::new(HashMap::new())) + } + + pub fn put(&self, step: S, value: S::Output) { + let mut cache = self.0.borrow_mut(); + let type_id = TypeId::of::(); + let stepcache = cache.entry(type_id) + .or_insert_with(|| Box::new(HashMap::::new())) + .downcast_mut::>() + .expect("invalid type mapped"); + assert!(!stepcache.contains_key(&step), "processing {:?} a second time", step); + stepcache.insert(step, value); + } + + pub fn get(&self, step: &S) -> Option { + let mut cache = self.0.borrow_mut(); + let type_id = TypeId::of::(); + let stepcache = cache.entry(type_id) + .or_insert_with(|| Box::new(HashMap::::new())) + .downcast_mut::>() + .expect("invalid type mapped"); + stepcache.get(step).cloned() + } +} diff --git a/src/bootstrap/cc.rs b/src/bootstrap/cc.rs index 7c7161916e..0f25da8a23 100644 --- a/src/bootstrap/cc.rs +++ b/src/bootstrap/cc.rs @@ -32,25 +32,24 @@ //! everything. use std::process::Command; +use std::iter; use build_helper::{cc2ar, output}; use gcc; use Build; use config::Target; +use cache::Interned; pub fn find(build: &mut Build) { // For all targets we're going to need a C compiler for building some shims // and such as well as for being a linker for Rust code. - // - // This includes targets that aren't necessarily passed on the commandline - // (FIXME: Perhaps it shouldn't?) - for target in &build.config.target { + for target in build.targets.iter().chain(&build.hosts).cloned().chain(iter::once(build.build)) { let mut cfg = gcc::Config::new(); cfg.cargo_metadata(false).opt_level(0).debug(false) - .target(target).host(&build.build); + .target(&target).host(&build.build); - let config = build.config.target_config.get(target); + let config = build.config.target_config.get(&target); if let Some(cc) = config.and_then(|c| c.cc.as_ref()) { cfg.compiler(cc); } else { @@ -58,23 +57,20 @@ pub fn find(build: &mut Build) { } let compiler = cfg.get_compiler(); - let ar = cc2ar(compiler.path(), target); - build.verbose(&format!("CC_{} = {:?}", target, compiler.path())); + let ar = cc2ar(compiler.path(), &target); + build.verbose(&format!("CC_{} = {:?}", &target, compiler.path())); if let Some(ref ar) = ar { - build.verbose(&format!("AR_{} = {:?}", target, ar)); + build.verbose(&format!("AR_{} = {:?}", &target, ar)); } - build.cc.insert(target.to_string(), (compiler, ar)); + build.cc.insert(target, (compiler, ar)); } // For all host triples we need to find a C++ compiler as well - // - // This includes hosts that aren't necessarily passed on the commandline - // (FIXME: Perhaps it shouldn't?) 
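// Small sketch of the triple set iterated in the compiler probing loops
// around here: every --target, every --host, then the build triple itself,
// chained into one pass. The TripleSet type and its field values are
// illustrative stand-ins for the corresponding Build fields
// (targets, hosts, build).
use std::iter;

struct TripleSet {
    build: &'static str,
    hosts: Vec<&'static str>,
    targets: Vec<&'static str>,
}

fn all_triples(b: &TripleSet) -> Vec<&'static str> {
    b.targets.iter()
        .chain(b.hosts.iter())
        .cloned()
        .chain(iter::once(b.build))
        .collect()
}

fn main() {
    let b = TripleSet {
        build: "x86_64-unknown-linux-gnu",
        hosts: vec!["x86_64-unknown-linux-gnu"],
        targets: vec!["armv7-unknown-linux-gnueabihf"],
    };
    // Duplicates can occur (the build triple is usually also a host); keying
    // the resulting compiler map by triple makes re-insertion harmless.
    assert_eq!(all_triples(&b).len(), 3);
}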
- for host in &build.config.host { + for host in build.hosts.iter().cloned().chain(iter::once(build.build)) { let mut cfg = gcc::Config::new(); cfg.cargo_metadata(false).opt_level(0).debug(false).cpp(true) - .target(host).host(&build.build); - let config = build.config.target_config.get(host); + .target(&host).host(&build.build); + let config = build.config.target_config.get(&host); if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) { cfg.compiler(cxx); } else { @@ -82,16 +78,16 @@ pub fn find(build: &mut Build) { } let compiler = cfg.get_compiler(); build.verbose(&format!("CXX_{} = {:?}", host, compiler.path())); - build.cxx.insert(host.to_string(), compiler); + build.cxx.insert(host, compiler); } } fn set_compiler(cfg: &mut gcc::Config, gnu_compiler: &str, - target: &str, + target: Interned, config: Option<&Target>, build: &Build) { - match target { + match &*target { // When compiling for android we may have the NDK configured in the // config.toml in which case we look there. Otherwise the default // compiler already takes into account the triple in question. diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs index 16a7aa8b0a..b79c7de343 100644 --- a/src/bootstrap/channel.rs +++ b/src/bootstrap/channel.rs @@ -21,14 +21,15 @@ use std::process::Command; use build_helper::output; use Build; +use config::Config; // The version number -pub const CFG_RELEASE_NUM: &str = "1.20.0"; +pub const CFG_RELEASE_NUM: &str = "1.21.0"; // An optional number to put after the label, e.g. '.2' -> '-beta.2' // Be sure to make this starts with a dot to conform to semver pre-release // versions (section 9) -pub const CFG_PRERELEASE_VERSION: &str = ".3"; +pub const CFG_PRERELEASE_VERSION: &str = ".4"; pub struct GitInfo { inner: Option, @@ -41,9 +42,9 @@ struct Info { } impl GitInfo { - pub fn new(dir: &Path) -> GitInfo { + pub fn new(config: &Config, dir: &Path) -> GitInfo { // See if this even begins to look like a git dir - if !dir.join(".git").exists() { + if config.ignore_git || !dir.join(".git").exists() { return GitInfo { inner: None } } diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs index e4b0e2fb9c..92fb2105b7 100644 --- a/src/bootstrap/check.rs +++ b/src/bootstrap/check.rs @@ -25,14 +25,19 @@ use std::io::Read; use build_helper::{self, output}; -use {Build, Compiler, Mode}; +use builder::{Kind, RunConfig, ShouldRun, Builder, Compiler, Step}; +use cache::{INTERNER, Interned}; +use compile; use dist; -use util::{self, dylib_path, dylib_path_var, exe}; +use native; +use tool::{self, Tool}; +use util::{self, dylib_path, dylib_path_var}; +use {Build, Mode}; const ADB_TEST_DIR: &str = "/data/tmp/work"; /// The two modes of the test runner; tests or benchmarks. -#[derive(Copy, Clone)] +#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] pub enum TestKind { /// Run `cargo test` Test, @@ -81,320 +86,735 @@ fn try_run_quiet(build: &Build, cmd: &mut Command) { } } -/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler. -/// -/// This tool in `src/tools` will verify the validity of all our links in the -/// documentation to ensure we don't have a bunch of dead ones. 
-pub fn linkcheck(build: &Build, host: &str) { - println!("Linkcheck ({})", host); - let compiler = Compiler::new(0, host); +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Linkcheck { + host: Interned, +} + +impl Step for Linkcheck { + type Output = (); + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = true; + + /// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler. + /// + /// This tool in `src/tools` will verify the validity of all our links in the + /// documentation to ensure we don't have a bunch of dead ones. + fn run(self, builder: &Builder) { + let build = builder.build; + let host = self.host; + + println!("Linkcheck ({})", host); + + builder.default_doc(None); + + let _time = util::timeit(); + try_run(build, builder.tool_cmd(Tool::Linkchecker) + .arg(build.out.join(host).join("doc"))); + } - let _time = util::timeit(); - try_run(build, build.tool_cmd(&compiler, "linkchecker") - .arg(build.out.join(host).join("doc"))); + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.path("src/tools/linkchecker").default_condition(builder.build.config.docs) + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Linkcheck { host: run.target }); + } } -/// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler. -/// -/// This tool in `src/tools` will check out a few Rust projects and run `cargo -/// test` to ensure that we don't regress the test suites there. -pub fn cargotest(build: &Build, stage: u32, host: &str) { - let compiler = Compiler::new(stage, host); - - // Note that this is a short, cryptic, and not scoped directory name. This - // is currently to minimize the length of path on Windows where we otherwise - // quickly run into path name limit constraints. - let out_dir = build.out.join("ct"); - t!(fs::create_dir_all(&out_dir)); - - let _time = util::timeit(); - let mut cmd = Command::new(build.tool(&Compiler::new(0, host), "cargotest")); - build.prepare_tool_cmd(&compiler, &mut cmd); - try_run(build, cmd.arg(&build.initial_cargo) - .arg(&out_dir) - .env("RUSTC", build.compiler_path(&compiler)) - .env("RUSTDOC", build.rustdoc(&compiler))); -} - -/// Runs `cargo test` for `cargo` packaged with Rust. -pub fn cargo(build: &Build, stage: u32, host: &str) { - let compiler = &Compiler::new(stage, host); - - let mut cargo = build.cargo(compiler, Mode::Tool, host, "test"); - cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml")); - if !build.fail_fast { - cargo.arg("--no-fail-fast"); +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Cargotest { + stage: u32, + host: Interned, +} + +impl Step for Cargotest { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/tools/cargotest") } - // Don't build tests dynamically, just a pain to work with - cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); + fn make_run(run: RunConfig) { + run.builder.ensure(Cargotest { + stage: run.builder.top_stage, + host: run.target, + }); + } - // Don't run cross-compile tests, we may not have cross-compiled libstd libs - // available. - cargo.env("CFG_DISABLE_CROSS_TESTS", "1"); + /// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler. + /// + /// This tool in `src/tools` will check out a few Rust projects and run `cargo + /// test` to ensure that we don't regress the test suites there. 
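// A compact sketch of the step/ensure pattern the new code in this file is
// built around, assuming only std: each step is a small hashable value
// describing one unit of work, and the driver memoizes results while
// panicking on dependency cycles. `Step` and `Driver` here are simplified
// stand-ins for builder::Step and Builder::ensure (the real trait also has
// should_run/make_run hooks and uses interior mutability).
use std::collections::HashMap;

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
enum Step {
    Std { target: &'static str },
    Test { target: &'static str },
}

#[derive(Default)]
struct Driver {
    stack: Vec<Step>,
    cache: HashMap<Step, String>,
}

impl Driver {
    fn ensure(&mut self, step: Step) -> String {
        if self.stack.contains(&step) {
            panic!("cycle in build detected when adding {:?}", step);
        }
        if let Some(out) = self.cache.get(&step) {
            return out.clone(); // already built: return the cached output
        }
        self.stack.push(step.clone());
        let out = self.run(step.clone());
        self.stack.pop();
        self.cache.insert(step, out.clone());
        out
    }

    fn run(&mut self, step: Step) -> String {
        match step {
            // libtest depends on libstd, so `ensure` the dependency first.
            Step::Test { target } => {
                let std = self.ensure(Step::Std { target });
                format!("libtest for {} (on top of {})", target, std)
            }
            Step::Std { target } => format!("libstd for {}", target),
        }
    }
}

fn main() {
    let mut d = Driver::default();
    d.ensure(Step::Test { target: "x86_64-unknown-linux-gnu" });
    // Asking again hits the cache instead of re-running the step.
    d.ensure(Step::Std { target: "x86_64-unknown-linux-gnu" });
    assert_eq!(d.cache.len(), 2);
}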
+ fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = builder.compiler(self.stage, self.host); + builder.ensure(compile::Rustc { compiler, target: compiler.host }); + + // Note that this is a short, cryptic, and not scoped directory name. This + // is currently to minimize the length of path on Windows where we otherwise + // quickly run into path name limit constraints. + let out_dir = build.out.join("ct"); + t!(fs::create_dir_all(&out_dir)); + + let _time = util::timeit(); + let mut cmd = builder.tool_cmd(Tool::CargoTest); + try_run(build, cmd.arg(&build.initial_cargo) + .arg(&out_dir) + .env("RUSTC", builder.rustc(compiler)) + .env("RUSTDOC", builder.rustdoc(compiler.host))); + } +} - try_run(build, cargo.env("PATH", &path_for_cargo(build, compiler))); +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Cargo { + stage: u32, + host: Interned, } -/// Runs `cargo test` for the rls. -pub fn rls(build: &Build, stage: u32, host: &str) { - let compiler = &Compiler::new(stage, host); +impl Step for Cargo { + type Output = (); + const ONLY_HOSTS: bool = true; - let mut cargo = build.cargo(compiler, Mode::Tool, host, "test"); - cargo.arg("--manifest-path").arg(build.src.join("src/tools/rls/Cargo.toml")); + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/tools/cargo") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Cargo { + stage: run.builder.top_stage, + host: run.target, + }); + } - // Don't build tests dynamically, just a pain to work with - cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); + /// Runs `cargo test` for `cargo` packaged with Rust. + fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = builder.compiler(self.stage, self.host); - build.add_rustc_lib_path(compiler, &mut cargo); + builder.ensure(tool::Cargo { compiler, target: self.host }); + let mut cargo = builder.cargo(compiler, Mode::Tool, self.host, "test"); + cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml")); + if !build.fail_fast { + cargo.arg("--no-fail-fast"); + } - try_run(build, &mut cargo); + // Don't build tests dynamically, just a pain to work with + cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); + + // Don't run cross-compile tests, we may not have cross-compiled libstd libs + // available. + cargo.env("CFG_DISABLE_CROSS_TESTS", "1"); + + try_run(build, cargo.env("PATH", &path_for_cargo(builder, compiler))); + } } -fn path_for_cargo(build: &Build, compiler: &Compiler) -> OsString { +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Rls { + stage: u32, + host: Interned, +} + +impl Step for Rls { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/tools/rls") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Rls { + stage: run.builder.top_stage, + host: run.target, + }); + } + + /// Runs `cargo test` for the rls. 
+ fn run(self, builder: &Builder) { + let build = builder.build; + let stage = self.stage; + let host = self.host; + let compiler = builder.compiler(stage, host); + + builder.ensure(tool::Rls { compiler, target: self.host }); + let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test"); + cargo.arg("--manifest-path").arg(build.src.join("src/tools/rls/Cargo.toml")); + + // Don't build tests dynamically, just a pain to work with + cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); + + builder.add_rustc_lib_path(compiler, &mut cargo); + + try_run(build, &mut cargo); + } +} + +fn path_for_cargo(builder: &Builder, compiler: Compiler) -> OsString { // Configure PATH to find the right rustc. NB. we have to use PATH // and not RUSTC because the Cargo test suite has tests that will // fail if rustc is not spelled `rustc`. - let path = build.sysroot(compiler).join("bin"); + let path = builder.sysroot(compiler).join("bin"); let old_path = env::var_os("PATH").unwrap_or_default(); env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("") } -/// Runs the `tidy` tool as compiled in `stage` by the `host` compiler. -/// -/// This tool in `src/tools` checks up on various bits and pieces of style and -/// otherwise just implements a few lint-like checks that are specific to the -/// compiler itself. -pub fn tidy(build: &Build, host: &str) { - let _folder = build.fold_output(|| "tidy"); - println!("tidy check ({})", host); - let compiler = Compiler::new(0, host); - let mut cmd = build.tool_cmd(&compiler, "tidy"); - cmd.arg(build.src.join("src")); - if !build.config.vendor { - cmd.arg("--no-vendor"); +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Tidy { + host: Interned, +} + +impl Step for Tidy { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + const ONLY_BUILD: bool = true; + + /// Runs the `tidy` tool as compiled in `stage` by the `host` compiler. + /// + /// This tool in `src/tools` checks up on various bits and pieces of style and + /// otherwise just implements a few lint-like checks that are specific to the + /// compiler itself. + fn run(self, builder: &Builder) { + let build = builder.build; + let host = self.host; + + let _folder = build.fold_output(|| "tidy"); + println!("tidy check ({})", host); + let mut cmd = builder.tool_cmd(Tool::Tidy); + cmd.arg(build.src.join("src")); + if !build.config.vendor { + cmd.arg("--no-vendor"); + } + if build.config.quiet_tests { + cmd.arg("--quiet"); + } + try_run(build, &mut cmd); } - if build.config.quiet_tests { - cmd.arg("--quiet"); + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/tools/tidy") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Tidy { + host: run.builder.build.build, + }); } - try_run(build, &mut cmd); } -fn testdir(build: &Build, host: &str) -> PathBuf { +fn testdir(build: &Build, host: Interned) -> PathBuf { build.out.join(host).join("test") } -/// Executes the `compiletest` tool to run a suite of tests. -/// -/// Compiles all tests with `compiler` for `target` with the specified -/// compiletest `mode` and `suite` arguments. For example `mode` can be -/// "run-pass" or `suite` can be something like `debuginfo`. 
-pub fn compiletest(build: &Build, - compiler: &Compiler, - target: &str, - mode: &str, - suite: &str) { - let _folder = build.fold_output(|| format!("test_{}", suite)); - println!("Check compiletest suite={} mode={} ({} -> {})", - suite, mode, compiler.host, target); - let mut cmd = Command::new(build.tool(&Compiler::new(0, compiler.host), - "compiletest")); - build.prepare_tool_cmd(compiler, &mut cmd); - - // compiletest currently has... a lot of arguments, so let's just pass all - // of them! - - cmd.arg("--compile-lib-path").arg(build.rustc_libdir(compiler)); - cmd.arg("--run-lib-path").arg(build.sysroot_libdir(compiler, target)); - cmd.arg("--rustc-path").arg(build.compiler_path(compiler)); - cmd.arg("--rustdoc-path").arg(build.rustdoc(compiler)); - cmd.arg("--src-base").arg(build.src.join("src/test").join(suite)); - cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite)); - cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target)); - cmd.arg("--mode").arg(mode); - cmd.arg("--target").arg(target); - cmd.arg("--host").arg(compiler.host); - cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.build)); - - if let Some(ref nodejs) = build.config.nodejs { - cmd.arg("--nodejs").arg(nodejs); - } - - let mut flags = vec!["-Crpath".to_string()]; - if build.config.rust_optimize_tests { - flags.push("-O".to_string()); - } - if build.config.rust_debuginfo_tests { - flags.push("-g".to_string()); - } - - let mut hostflags = build.rustc_flags(&compiler.host); - hostflags.extend(flags.clone()); - cmd.arg("--host-rustcflags").arg(hostflags.join(" ")); - - let mut targetflags = build.rustc_flags(&target); - targetflags.extend(flags); - targetflags.push(format!("-Lnative={}", - build.test_helpers_out(target).display())); - cmd.arg("--target-rustcflags").arg(targetflags.join(" ")); - - cmd.arg("--docck-python").arg(build.python()); - - if build.build.ends_with("apple-darwin") { - // Force /usr/bin/python on macOS for LLDB tests because we're loading the - // LLDB plugin's compiled module which only works with the system python - // (namely not Homebrew-installed python) - cmd.arg("--lldb-python").arg("/usr/bin/python"); - } else { - cmd.arg("--lldb-python").arg(build.python()); - } +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +struct Test { + path: &'static str, + mode: &'static str, + suite: &'static str, +} - if let Some(ref gdb) = build.config.gdb { - cmd.arg("--gdb").arg(gdb); - } - if let Some(ref vers) = build.lldb_version { - cmd.arg("--lldb-version").arg(vers); +static DEFAULT_COMPILETESTS: &[Test] = &[ + Test { path: "src/test/ui", mode: "ui", suite: "ui" }, + Test { path: "src/test/run-pass", mode: "run-pass", suite: "run-pass" }, + Test { path: "src/test/compile-fail", mode: "compile-fail", suite: "compile-fail" }, + Test { path: "src/test/parse-fail", mode: "parse-fail", suite: "parse-fail" }, + Test { path: "src/test/run-fail", mode: "run-fail", suite: "run-fail" }, + Test { + path: "src/test/run-pass-valgrind", + mode: "run-pass-valgrind", + suite: "run-pass-valgrind" + }, + Test { path: "src/test/mir-opt", mode: "mir-opt", suite: "mir-opt" }, + Test { path: "src/test/codegen", mode: "codegen", suite: "codegen" }, + Test { path: "src/test/codegen-units", mode: "codegen-units", suite: "codegen-units" }, + Test { path: "src/test/incremental", mode: "incremental", suite: "incremental" }, + + // What this runs varies depending on the native platform being apple + Test { path: "src/test/debuginfo", mode: "debuginfo-XXX", suite: "debuginfo" }, +]; + 
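// Sketch of how a path given on the command line can be matched against a
// static suite table like the one above: component-wise suffix matching via
// Path::ends_with. The table entries here are a small illustrative subset.
use std::path::Path;

#[derive(Debug, Copy, Clone)]
struct Suite {
    path: &'static str,
    mode: &'static str,
    suite: &'static str,
}

static SUITES: &[Suite] = &[
    Suite { path: "src/test/run-pass", mode: "run-pass", suite: "run-pass" },
    Suite { path: "src/test/codegen", mode: "codegen", suite: "codegen" },
];

// Return the table entry whose source directory the requested path names.
fn find_suite(requested: &Path) -> Option<&'static Suite> {
    SUITES.iter().find(|s| requested.ends_with(s.path))
}

fn main() {
    let hit = find_suite(Path::new("/checkout/src/test/codegen"));
    assert_eq!(hit.map(|s| s.mode), Some("codegen"));
    assert!(find_suite(Path::new("/checkout/src/test/rustdoc")).is_none());
}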
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct DefaultCompiletest { + compiler: Compiler, + target: Interned, + mode: &'static str, + suite: &'static str, +} + +impl Step for DefaultCompiletest { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(mut run: ShouldRun) -> ShouldRun { + for test in DEFAULT_COMPILETESTS { + run = run.path(test.path); + } + run } - if let Some(ref dir) = build.lldb_python_dir { - cmd.arg("--lldb-python-dir").arg(dir); + + fn make_run(run: RunConfig) { + let compiler = run.builder.compiler(run.builder.top_stage, run.host); + + let test = run.path.map(|path| { + DEFAULT_COMPILETESTS.iter().find(|&&test| { + path.ends_with(test.path) + }).unwrap_or_else(|| { + panic!("make_run in compile test to receive test path, received {:?}", path); + }) + }); + + if let Some(test) = test { + run.builder.ensure(DefaultCompiletest { + compiler, + target: run.target, + mode: test.mode, + suite: test.suite, + }); + } else { + for test in DEFAULT_COMPILETESTS { + run.builder.ensure(DefaultCompiletest { + compiler, + target: run.target, + mode: test.mode, + suite: test.suite + }); + } + } } - let llvm_config = build.llvm_config(target); - let llvm_version = output(Command::new(&llvm_config).arg("--version")); - cmd.arg("--llvm-version").arg(llvm_version); - if !build.is_rust_llvm(target) { - cmd.arg("--system-llvm"); + + fn run(self, builder: &Builder) { + builder.ensure(Compiletest { + compiler: self.compiler, + target: self.target, + mode: self.mode, + suite: self.suite, + }) } +} + +// Also default, but host-only. +static HOST_COMPILETESTS: &[Test] = &[ + Test { path: "src/test/ui-fulldeps", mode: "ui", suite: "ui-fulldeps" }, + Test { path: "src/test/run-pass-fulldeps", mode: "run-pass", suite: "run-pass-fulldeps" }, + Test { path: "src/test/run-fail-fulldeps", mode: "run-fail", suite: "run-fail-fulldeps" }, + Test { + path: "src/test/compile-fail-fulldeps", + mode: "compile-fail", + suite: "compile-fail-fulldeps", + }, + Test { path: "src/test/run-make", mode: "run-make", suite: "run-make" }, + Test { path: "src/test/rustdoc", mode: "rustdoc", suite: "rustdoc" }, + + Test { path: "src/test/pretty", mode: "pretty", suite: "pretty" }, + Test { path: "src/test/run-pass/pretty", mode: "pretty", suite: "run-pass" }, + Test { path: "src/test/run-fail/pretty", mode: "pretty", suite: "run-fail" }, + Test { path: "src/test/run-pass-valgrind/pretty", mode: "pretty", suite: "run-pass-valgrind" }, + Test { path: "src/test/run-pass-fulldeps/pretty", mode: "pretty", suite: "run-pass-fulldeps" }, + Test { path: "src/test/run-fail-fulldeps/pretty", mode: "pretty", suite: "run-fail-fulldeps" }, +]; + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct HostCompiletest { + compiler: Compiler, + target: Interned, + mode: &'static str, + suite: &'static str, +} - cmd.args(&build.flags.cmd.test_args()); +impl Step for HostCompiletest { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; - if build.is_verbose() { - cmd.arg("--verbose"); + fn should_run(mut run: ShouldRun) -> ShouldRun { + for test in HOST_COMPILETESTS { + run = run.path(test.path); + } + run } - if build.config.quiet_tests { - cmd.arg("--quiet"); - } - - // Only pass correct values for these flags for the `run-make` suite as it - // requires that a C++ compiler was configured which isn't always the case. 
- if suite == "run-make" { - let llvm_components = output(Command::new(&llvm_config).arg("--components")); - let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags")); - cmd.arg("--cc").arg(build.cc(target)) - .arg("--cxx").arg(build.cxx(target).unwrap()) - .arg("--cflags").arg(build.cflags(target).join(" ")) - .arg("--llvm-components").arg(llvm_components.trim()) - .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim()); - } else { - cmd.arg("--cc").arg("") - .arg("--cxx").arg("") - .arg("--cflags").arg("") - .arg("--llvm-components").arg("") - .arg("--llvm-cxxflags").arg(""); - } - - if build.remote_tested(target) { - cmd.arg("--remote-test-client") - .arg(build.tool(&Compiler::new(0, &build.build), - "remote-test-client")); - } - - // Running a C compiler on MSVC requires a few env vars to be set, to be - // sure to set them here. - // - // Note that if we encounter `PATH` we make sure to append to our own `PATH` - // rather than stomp over it. - if target.contains("msvc") { - for &(ref k, ref v) in build.cc[target].0.env() { - if k != "PATH" { - cmd.env(k, v); + fn make_run(run: RunConfig) { + let compiler = run.builder.compiler(run.builder.top_stage, run.host); + + let test = run.path.map(|path| { + HOST_COMPILETESTS.iter().find(|&&test| { + path.ends_with(test.path) + }).unwrap_or_else(|| { + panic!("make_run in compile test to receive test path, received {:?}", path); + }) + }); + + if let Some(test) = test { + run.builder.ensure(HostCompiletest { + compiler, + target: run.target, + mode: test.mode, + suite: test.suite, + }); + } else { + for test in HOST_COMPILETESTS { + if test.mode == "pretty" { + continue; + } + run.builder.ensure(HostCompiletest { + compiler, + target: run.target, + mode: test.mode, + suite: test.suite + }); } } } - cmd.env("RUSTC_BOOTSTRAP", "1"); - build.add_rust_test_threads(&mut cmd); - if build.config.sanitizers { - cmd.env("SANITIZER_SUPPORT", "1"); + fn run(self, builder: &Builder) { + builder.ensure(Compiletest { + compiler: self.compiler, + target: self.target, + mode: self.mode, + suite: self.suite, + }) } +} - if build.config.profiler { - cmd.env("PROFILER_SUPPORT", "1"); - } +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +struct Compiletest { + compiler: Compiler, + target: Interned, + mode: &'static str, + suite: &'static str, +} - cmd.arg("--adb-path").arg("adb"); - cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR); - if target.contains("android") { - // Assume that cc for this target comes from the android sysroot - cmd.arg("--android-cross-path") - .arg(build.cc(target).parent().unwrap().parent().unwrap()); - } else { - cmd.arg("--android-cross-path").arg(""); +impl Step for Compiletest { + type Output = (); + + fn should_run(run: ShouldRun) -> ShouldRun { + run.never() } - build.ci_env.force_coloring_in_ci(&mut cmd); + /// Executes the `compiletest` tool to run a suite of tests. + /// + /// Compiles all tests with `compiler` for `target` with the specified + /// compiletest `mode` and `suite` arguments. For example `mode` can be + /// "run-pass" or `suite` can be something like `debuginfo`. + fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = self.compiler; + let target = self.target; + let mode = self.mode; + let suite = self.suite; + + // Skip codegen tests if they aren't enabled in configuration. 
+ if !build.config.codegen_tests && suite == "codegen" { + return; + } - let _time = util::timeit(); - try_run(build, &mut cmd); -} + if suite == "debuginfo" { + // Skip debuginfo tests on MSVC + if build.build.contains("msvc") { + return; + } -/// Run `rustdoc --test` for all documentation in `src/doc`. -/// -/// This will run all tests in our markdown documentation (e.g. the book) -/// located in `src/doc`. The `rustdoc` that's run is the one that sits next to -/// `compiler`. -pub fn docs(build: &Build, compiler: &Compiler) { - // Do a breadth-first traversal of the `src/doc` directory and just run - // tests for all files that end in `*.md` - let mut stack = vec![build.src.join("src/doc")]; - let _time = util::timeit(); - let _folder = build.fold_output(|| "test_docs"); + if mode == "debuginfo-XXX" { + return if build.build.contains("apple") { + builder.ensure(Compiletest { + mode: "debuginfo-lldb", + ..self + }); + } else { + builder.ensure(Compiletest { + mode: "debuginfo-gdb", + ..self + }); + }; + } + + builder.ensure(dist::DebuggerScripts { + sysroot: builder.sysroot(compiler), + host: target + }); + } + + if suite.ends_with("fulldeps") || + // FIXME: Does pretty need librustc compiled? Note that there are + // fulldeps test suites with mode = pretty as well. + mode == "pretty" || + mode == "rustdoc" || + mode == "run-make" { + builder.ensure(compile::Rustc { compiler, target }); + } + + builder.ensure(compile::Test { compiler, target }); + builder.ensure(native::TestHelpers { target }); + builder.ensure(RemoteCopyLibs { compiler, target }); + + let _folder = build.fold_output(|| format!("test_{}", suite)); + println!("Check compiletest suite={} mode={} ({} -> {})", + suite, mode, &compiler.host, target); + let mut cmd = builder.tool_cmd(Tool::Compiletest); + + // compiletest currently has... a lot of arguments, so let's just pass all + // of them! + + cmd.arg("--compile-lib-path").arg(builder.rustc_libdir(compiler)); + cmd.arg("--run-lib-path").arg(builder.sysroot_libdir(compiler, target)); + cmd.arg("--rustc-path").arg(builder.rustc(compiler)); + + // Avoid depending on rustdoc when we don't need it. 
+ if mode == "rustdoc" || mode == "run-make" { + cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler.host)); + } + + cmd.arg("--src-base").arg(build.src.join("src/test").join(suite)); + cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite)); + cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target)); + cmd.arg("--mode").arg(mode); + cmd.arg("--target").arg(target); + cmd.arg("--host").arg(&*compiler.host); + cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(build.build)); + + if let Some(ref nodejs) = build.config.nodejs { + cmd.arg("--nodejs").arg(nodejs); + } + + let mut flags = vec!["-Crpath".to_string()]; + if build.config.rust_optimize_tests { + flags.push("-O".to_string()); + } + if build.config.rust_debuginfo_tests { + flags.push("-g".to_string()); + } + + let mut hostflags = build.rustc_flags(compiler.host); + hostflags.extend(flags.clone()); + cmd.arg("--host-rustcflags").arg(hostflags.join(" ")); + + let mut targetflags = build.rustc_flags(target); + targetflags.extend(flags); + targetflags.push(format!("-Lnative={}", + build.test_helpers_out(target).display())); + cmd.arg("--target-rustcflags").arg(targetflags.join(" ")); + + cmd.arg("--docck-python").arg(build.python()); + + if build.build.ends_with("apple-darwin") { + // Force /usr/bin/python on macOS for LLDB tests because we're loading the + // LLDB plugin's compiled module which only works with the system python + // (namely not Homebrew-installed python) + cmd.arg("--lldb-python").arg("/usr/bin/python"); + } else { + cmd.arg("--lldb-python").arg(build.python()); + } + + if let Some(ref gdb) = build.config.gdb { + cmd.arg("--gdb").arg(gdb); + } + if let Some(ref vers) = build.lldb_version { + cmd.arg("--lldb-version").arg(vers); + } + if let Some(ref dir) = build.lldb_python_dir { + cmd.arg("--lldb-python-dir").arg(dir); + } + + cmd.args(&build.config.cmd.test_args()); + + if build.is_verbose() { + cmd.arg("--verbose"); + } + + if build.config.quiet_tests { + cmd.arg("--quiet"); + } + + if build.config.llvm_enabled { + let llvm_config = build.llvm_config(target); + let llvm_version = output(Command::new(&llvm_config).arg("--version")); + cmd.arg("--llvm-version").arg(llvm_version); + if !build.is_rust_llvm(target) { + cmd.arg("--system-llvm"); + } - while let Some(p) = stack.pop() { - if p.is_dir() { - stack.extend(t!(p.read_dir()).map(|p| t!(p).path())); - continue + // Only pass correct values for these flags for the `run-make` suite as it + // requires that a C++ compiler was configured which isn't always the case. 
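// The llvm-config queries here rely on a small helper that spawns a command
// and captures its stdout as a String; build_helper::output behaves roughly
// like this sketch (rustc is used as a stand-in command so the example runs
// anywhere).
use std::process::Command;

fn output(cmd: &mut Command) -> String {
    let out = cmd.output().expect("failed to spawn command");
    assert!(out.status.success(), "command did not run successfully: {:?}", cmd);
    String::from_utf8(out.stdout).expect("command produced non-UTF-8 output")
}

fn main() {
    let version = output(Command::new("rustc").arg("--version"));
    // Passed on in the same spirit as `--llvm-version $(llvm-config --version)`.
    println!("version argument: {}", version.trim());
}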
+ if suite == "run-make" { + let llvm_components = output(Command::new(&llvm_config).arg("--components")); + let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags")); + cmd.arg("--cc").arg(build.cc(target)) + .arg("--cxx").arg(build.cxx(target).unwrap()) + .arg("--cflags").arg(build.cflags(target).join(" ")) + .arg("--llvm-components").arg(llvm_components.trim()) + .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim()); + } + } + if suite == "run-make" && !build.config.llvm_enabled { + println!("Ignoring run-make test suite as they generally dont work without LLVM"); + return; + } + + if suite != "run-make" { + cmd.arg("--cc").arg("") + .arg("--cxx").arg("") + .arg("--cflags").arg("") + .arg("--llvm-components").arg("") + .arg("--llvm-cxxflags").arg(""); } - if p.extension().and_then(|s| s.to_str()) != Some("md") { - continue; + if build.remote_tested(target) { + cmd.arg("--remote-test-client").arg(builder.tool_exe(Tool::RemoteTestClient)); + } + + // Running a C compiler on MSVC requires a few env vars to be set, to be + // sure to set them here. + // + // Note that if we encounter `PATH` we make sure to append to our own `PATH` + // rather than stomp over it. + if target.contains("msvc") { + for &(ref k, ref v) in build.cc[&target].0.env() { + if k != "PATH" { + cmd.env(k, v); + } + } } + cmd.env("RUSTC_BOOTSTRAP", "1"); + build.add_rust_test_threads(&mut cmd); - // The nostarch directory in the book is for no starch, and so isn't - // guaranteed to build. We don't care if it doesn't build, so skip it. - if p.to_str().map_or(false, |p| p.contains("nostarch")) { - continue; + if build.config.sanitizers { + cmd.env("SANITIZER_SUPPORT", "1"); } - markdown_test(build, compiler, &p); + if build.config.profiler { + cmd.env("PROFILER_SUPPORT", "1"); + } + + cmd.arg("--adb-path").arg("adb"); + cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR); + if target.contains("android") { + // Assume that cc for this target comes from the android sysroot + cmd.arg("--android-cross-path") + .arg(build.cc(target).parent().unwrap().parent().unwrap()); + } else { + cmd.arg("--android-cross-path").arg(""); + } + + build.ci_env.force_coloring_in_ci(&mut cmd); + + let _time = util::timeit(); + try_run(build, &mut cmd); } } -/// Run the error index generator tool to execute the tests located in the error -/// index. -/// -/// The `error_index_generator` tool lives in `src/tools` and is used to -/// generate a markdown file from the error indexes of the code base which is -/// then passed to `rustdoc --test`. -pub fn error_index(build: &Build, compiler: &Compiler) { - let _folder = build.fold_output(|| "test_error_index"); - println!("Testing error-index stage{}", compiler.stage); +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Docs { + compiler: Compiler, +} + +impl Step for Docs { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/doc") + } - let dir = testdir(build, compiler.host); - t!(fs::create_dir_all(&dir)); - let output = dir.join("error-index.md"); + fn make_run(run: RunConfig) { + run.builder.ensure(Docs { + compiler: run.builder.compiler(run.builder.top_stage, run.host), + }); + } + + /// Run `rustdoc --test` for all documentation in `src/doc`. + /// + /// This will run all tests in our markdown documentation (e.g. the book) + /// located in `src/doc`. The `rustdoc` that's run is the one that sits next to + /// `compiler`. 
+ fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = self.compiler; + + builder.ensure(compile::Test { compiler, target: compiler.host }); + + // Do a breadth-first traversal of the `src/doc` directory and just run + // tests for all files that end in `*.md` + let mut stack = vec![build.src.join("src/doc")]; + let _time = util::timeit(); + let _folder = build.fold_output(|| "test_docs"); + + while let Some(p) = stack.pop() { + if p.is_dir() { + stack.extend(t!(p.read_dir()).map(|p| t!(p).path())); + continue + } - let _time = util::timeit(); - build.run(build.tool_cmd(&Compiler::new(0, compiler.host), - "error_index_generator") - .arg("markdown") - .arg(&output) - .env("CFG_BUILD", &build.build)); + if p.extension().and_then(|s| s.to_str()) != Some("md") { + continue; + } - markdown_test(build, compiler, &output); + // The nostarch directory in the book is for no starch, and so isn't + // guaranteed to build. We don't care if it doesn't build, so skip it. + if p.to_str().map_or(false, |p| p.contains("nostarch")) { + continue; + } + + markdown_test(builder, compiler, &p); + } + } } -fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) { +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct ErrorIndex { + compiler: Compiler, +} + +impl Step for ErrorIndex { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/tools/error_index_generator") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(ErrorIndex { + compiler: run.builder.compiler(run.builder.top_stage, run.host), + }); + } + + /// Run the error index generator tool to execute the tests located in the error + /// index. + /// + /// The `error_index_generator` tool lives in `src/tools` and is used to + /// generate a markdown file from the error indexes of the code base which is + /// then passed to `rustdoc --test`. 
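// Sketch of the markdown_test flow shared by the Docs and ErrorIndex steps:
// skip files that contain no fenced code blocks, otherwise hand the file to
// `rustdoc --test`. Using the rustdoc found on PATH is purely for
// illustration; the real steps run the freshly built rustdoc with extra
// test arguments.
use std::fs;
use std::io;
use std::path::Path;
use std::process::Command;

fn markdown_test(markdown: &Path) -> io::Result<()> {
    let contents = fs::read_to_string(markdown)?;
    if !contents.contains("```") {
        return Ok(()); // nothing to doctest in this file
    }
    println!("doc tests for: {}", markdown.display());
    let status = Command::new("rustdoc")
        .arg("--test")
        .arg(markdown)
        .env("RUSTC_BOOTSTRAP", "1")
        .status()?;
    assert!(status.success(), "doc tests failed for {}", markdown.display());
    Ok(())
}

fn main() -> io::Result<()> {
    markdown_test(Path::new("README.md"))
}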
+ fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = self.compiler; + + builder.ensure(compile::Std { compiler, target: compiler.host }); + + let _folder = build.fold_output(|| "test_error_index"); + println!("Testing error-index stage{}", compiler.stage); + + let dir = testdir(build, compiler.host); + t!(fs::create_dir_all(&dir)); + let output = dir.join("error-index.md"); + + let _time = util::timeit(); + build.run(builder.tool_cmd(Tool::ErrorIndex) + .arg("markdown") + .arg(&output) + .env("CFG_BUILD", &build.build)); + + markdown_test(builder, compiler, &output); + } +} + +fn markdown_test(builder: &Builder, compiler: Compiler, markdown: &Path) { + let build = builder.build; let mut file = t!(File::open(markdown)); let mut contents = String::new(); t!(file.read_to_string(&mut contents)); @@ -403,14 +823,13 @@ fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) { } println!("doc tests for: {}", markdown.display()); - let mut cmd = Command::new(build.rustdoc(compiler)); - build.add_rustc_lib_path(compiler, &mut cmd); + let mut cmd = builder.rustdoc_cmd(compiler.host); build.add_rust_test_threads(&mut cmd); cmd.arg("--test"); cmd.arg(markdown); cmd.env("RUSTC_BOOTSTRAP", "1"); - let test_args = build.flags.cmd.test_args().join(" "); + let test_args = build.config.cmd.test_args().join(" "); cmd.arg("--test-args").arg(test_args); if build.config.quiet_tests { @@ -420,281 +839,412 @@ fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) { } } -/// Run all unit tests plus documentation tests for an entire crate DAG defined -/// by a `Cargo.toml` -/// -/// This is what runs tests for crates like the standard library, compiler, etc. -/// It essentially is the driver for running `cargo test`. -/// -/// Currently this runs all tests for a DAG by passing a bunch of `-p foo` -/// arguments, and those arguments are discovered from `cargo metadata`. -pub fn krate(build: &Build, - compiler: &Compiler, - target: &str, - mode: Mode, - test_kind: TestKind, - krate: Option<&str>) { - let (name, path, features, root) = match mode { - Mode::Libstd => { - ("libstd", "src/libstd", build.std_features(), "std") - } - Mode::Libtest => { - ("libtest", "src/libtest", String::new(), "test") - } - Mode::Librustc => { - ("librustc", "src/rustc", build.rustc_features(), "rustc-main") - } - _ => panic!("can only test libraries"), - }; - let _folder = build.fold_output(|| { - format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, name) - }); - println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage, - compiler.host, target); - - // If we're not doing a full bootstrap but we're testing a stage2 version of - // libstd, then what we're actually testing is the libstd produced in - // stage1. Reflect that here by updating the compiler that we're working - // with automatically. - let compiler = if build.force_use_stage1(compiler, target) { - Compiler::new(1, compiler.host) - } else { - compiler.clone() - }; - - // Build up the base `cargo test` command. - // - // Pass in some standard flags then iterate over the graph we've discovered - // in `cargo metadata` with the maps above and figure out what `-p` - // arguments need to get passed. 
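// Sketch of the crate-graph walk described in the comment above: starting
// from a root crate, follow dependency edges (discovered from `cargo
// metadata` in the real build) and emit one `-p <name>:0.0.0` argument per
// reachable crate, skipping build_helper and the jemalloc shim. The graph
// literal in main() is illustrative.
use std::collections::{HashMap, HashSet};

fn p_args(root: &'static str, deps: &HashMap<&'static str, Vec<&'static str>>) -> Vec<String> {
    let mut args = Vec::new();
    let mut visited = HashSet::new();
    let mut next = vec![root];
    while let Some(name) = next.pop() {
        if name != "build_helper" && !name.contains("jemalloc") {
            args.push(format!("-p {}:0.0.0", name));
        }
        for dep in deps.get(name).map(|v| v.as_slice()).unwrap_or(&[]) {
            if visited.insert(*dep) {
                next.push(*dep);
            }
        }
    }
    args
}

fn main() {
    let mut deps = HashMap::new();
    deps.insert("std", vec!["core", "alloc", "alloc_jemalloc"]);
    deps.insert("alloc", vec!["core"]);
    let args = p_args("std", &deps);
    // `core` is visited only once and the jemalloc crate is skipped.
    assert!(args.contains(&"-p core:0.0.0".to_string()));
    assert!(!args.iter().any(|a| a.contains("jemalloc")));
}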
- let mut cargo = build.cargo(&compiler, mode, target, test_kind.subcommand()); - cargo.arg("--manifest-path") - .arg(build.src.join(path).join("Cargo.toml")) - .arg("--features").arg(features); - if test_kind.subcommand() == "test" && !build.fail_fast { - cargo.arg("--no-fail-fast"); - } - - match krate { - Some(krate) => { - cargo.arg("-p").arg(krate); - } - None => { - let mut visited = HashSet::new(); - let mut next = vec![root]; - while let Some(name) = next.pop() { - // Right now jemalloc is our only target-specific crate in the - // sense that it's not present on all platforms. Custom skip it - // here for now, but if we add more this probably wants to get - // more generalized. - // - // Also skip `build_helper` as it's not compiled normally for - // target during the bootstrap and it's just meant to be a - // helper crate, not tested. If it leaks through then it ends up - // messing with various mtime calculations and such. - if !name.contains("jemalloc") && name != "build_helper" { - cargo.arg("-p").arg(&format!("{}:0.0.0", name)); - } - for dep in build.crates[name].deps.iter() { - if visited.insert(dep) { - next.push(dep); - } +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct CrateLibrustc { + compiler: Compiler, + target: Interned, + test_kind: TestKind, + krate: Option>, +} + +impl Step for CrateLibrustc { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.krate("rustc-main") + } + + fn make_run(run: RunConfig) { + let builder = run.builder; + let compiler = builder.compiler(builder.top_stage, run.host); + + let make = |name: Option>| { + let test_kind = if builder.kind == Kind::Test { + TestKind::Test + } else if builder.kind == Kind::Bench { + TestKind::Bench + } else { + panic!("unexpected builder.kind in crate: {:?}", builder.kind); + }; + + builder.ensure(CrateLibrustc { + compiler, + target: run.target, + test_kind, + krate: name, + }); + }; + + if let Some(path) = run.path { + for (name, krate_path) in builder.crates("rustc-main") { + if path.ends_with(krate_path) { + make(Some(name)); } } + } else { + make(None); } } - // The tests are going to run with the *target* libraries, so we need to - // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent. - // - // Note that to run the compiler we need to run with the *host* libraries, - // but our wrapper scripts arrange for that to be the case anyway. 
- let mut dylib_path = dylib_path(); - dylib_path.insert(0, build.sysroot_libdir(&compiler, target)); - cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); - if target.contains("emscripten") || build.remote_tested(target) { - cargo.arg("--no-run"); + fn run(self, builder: &Builder) { + builder.ensure(Crate { + compiler: self.compiler, + target: self.target, + mode: Mode::Librustc, + test_kind: self.test_kind, + krate: self.krate, + }); } +} - cargo.arg("--"); - if build.config.quiet_tests { - cargo.arg("--quiet"); - } +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Crate { + compiler: Compiler, + target: Interned, + mode: Mode, + test_kind: TestKind, + krate: Option>, +} - let _time = util::timeit(); +impl Step for Crate { + type Output = (); + const DEFAULT: bool = true; - if target.contains("emscripten") { - build.run(&mut cargo); - krate_emscripten(build, &compiler, target, mode); - } else if build.remote_tested(target) { - build.run(&mut cargo); - krate_remote(build, &compiler, target, mode); - } else { - cargo.args(&build.flags.cmd.test_args()); - try_run(build, &mut cargo); + fn should_run(run: ShouldRun) -> ShouldRun { + run.krate("std").krate("test") } -} -fn krate_emscripten(build: &Build, - compiler: &Compiler, - target: &str, - mode: Mode) { - let out_dir = build.cargo_out(compiler, mode, target); - let tests = find_tests(&out_dir.join("deps"), target); + fn make_run(run: RunConfig) { + let builder = run.builder; + let compiler = builder.compiler(builder.top_stage, run.host); + + let make = |mode: Mode, name: Option>| { + let test_kind = if builder.kind == Kind::Test { + TestKind::Test + } else if builder.kind == Kind::Bench { + TestKind::Bench + } else { + panic!("unexpected builder.kind in crate: {:?}", builder.kind); + }; + + builder.ensure(Crate { + compiler, + target: run.target, + mode, + test_kind, + krate: name, + }); + }; + + if let Some(path) = run.path { + for (name, krate_path) in builder.crates("std") { + if path.ends_with(krate_path) { + make(Mode::Libstd, Some(name)); + } + } + for (name, krate_path) in builder.crates("test") { + if path.ends_with(krate_path) { + make(Mode::Libtest, Some(name)); + } + } + } else { + make(Mode::Libstd, None); + make(Mode::Libtest, None); + } + } + + /// Run all unit tests plus documentation tests for an entire crate DAG defined + /// by a `Cargo.toml` + /// + /// This is what runs tests for crates like the standard library, compiler, etc. + /// It essentially is the driver for running `cargo test`. + /// + /// Currently this runs all tests for a DAG by passing a bunch of `-p foo` + /// arguments, and those arguments are discovered from `cargo metadata`. + fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = self.compiler; + let target = self.target; + let mode = self.mode; + let test_kind = self.test_kind; + let krate = self.krate; + + builder.ensure(compile::Test { compiler, target }); + builder.ensure(RemoteCopyLibs { compiler, target }); + + // If we're not doing a full bootstrap but we're testing a stage2 version of + // libstd, then what we're actually testing is the libstd produced in + // stage1. Reflect that here by updating the compiler that we're working + // with automatically. 
+ let compiler = if build.force_use_stage1(compiler, target) { + builder.compiler(1, compiler.host) + } else { + compiler.clone() + }; + + let mut cargo = builder.cargo(compiler, mode, target, test_kind.subcommand()); + let (name, root) = match mode { + Mode::Libstd => { + compile::std_cargo(build, &compiler, target, &mut cargo); + ("libstd", "std") + } + Mode::Libtest => { + compile::test_cargo(build, &compiler, target, &mut cargo); + ("libtest", "test") + } + Mode::Librustc => { + builder.ensure(compile::Rustc { compiler, target }); + compile::rustc_cargo(build, &compiler, target, &mut cargo); + ("librustc", "rustc-main") + } + _ => panic!("can only test libraries"), + }; + let root = INTERNER.intern_string(String::from(root)); + let _folder = build.fold_output(|| { + format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, name) + }); + println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage, + &compiler.host, target); + + // Build up the base `cargo test` command. + // + // Pass in some standard flags then iterate over the graph we've discovered + // in `cargo metadata` with the maps above and figure out what `-p` + // arguments need to get passed. + if test_kind.subcommand() == "test" && !build.fail_fast { + cargo.arg("--no-fail-fast"); + } + + match krate { + Some(krate) => { + cargo.arg("-p").arg(krate); + } + None => { + let mut visited = HashSet::new(); + let mut next = vec![root]; + while let Some(name) = next.pop() { + // Right now jemalloc and the sanitizer crates are + // target-specific crate in the sense that it's not present + // on all platforms. Custom skip it here for now, but if we + // add more this probably wants to get more generalized. + // + // Also skip `build_helper` as it's not compiled normally + // for target during the bootstrap and it's just meant to be + // a helper crate, not tested. If it leaks through then it + // ends up messing with various mtime calculations and such. + if !name.contains("jemalloc") && + *name != *"build_helper" && + !(name.starts_with("rustc_") && name.ends_with("san")) { + cargo.arg("-p").arg(&format!("{}:0.0.0", name)); + } + for dep in build.crates[&name].deps.iter() { + if visited.insert(dep) { + next.push(*dep); + } + } + } + } + } + + // The tests are going to run with the *target* libraries, so we need to + // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent. + // + // Note that to run the compiler we need to run with the *host* libraries, + // but our wrapper scripts arrange for that to be the case anyway. 
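// Sketch of the dylib-path handling referenced in the comment above: prepend
// the sysroot library directory to the platform's dynamic-library search
// path before spawning `cargo test`. The variable name is picked per
// platform, roughly as util::dylib_path_var does; the libdir value shown is
// illustrative.
use std::env;
use std::path::PathBuf;

fn dylib_path_var() -> &'static str {
    if cfg!(target_os = "windows") {
        "PATH"
    } else if cfg!(target_os = "macos") {
        "DYLD_LIBRARY_PATH"
    } else {
        "LD_LIBRARY_PATH"
    }
}

fn main() {
    let mut paths: Vec<PathBuf> = env::var_os(dylib_path_var())
        .map(|v| env::split_paths(&v).collect())
        .unwrap_or_default();
    paths.insert(0, PathBuf::from("/build/x86_64-unknown-linux-gnu/stage1/lib"));
    let joined = env::join_paths(&paths).expect("a path contained the separator");
    println!("{}={:?}", dylib_path_var(), joined);
}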
+ let mut dylib_path = dylib_path(); + dylib_path.insert(0, PathBuf::from(&*builder.sysroot_libdir(compiler, target))); + cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); + + cargo.arg("--"); + cargo.args(&build.config.cmd.test_args()); - let nodejs = build.config.nodejs.as_ref().expect("nodejs not configured"); - for test in tests { - println!("running {}", test.display()); - let mut cmd = Command::new(nodejs); - cmd.arg(&test); if build.config.quiet_tests { - cmd.arg("--quiet"); + cargo.arg("--quiet"); } - try_run(build, &mut cmd); + + let _time = util::timeit(); + + if target.contains("emscripten") { + cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), + build.config.nodejs.as_ref().expect("nodejs not configured")); + } else if build.remote_tested(target) { + cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), + format!("{} run", + builder.tool_exe(Tool::RemoteTestClient).display())); + } + try_run(build, &mut cargo); } } -fn krate_remote(build: &Build, - compiler: &Compiler, - target: &str, - mode: Mode) { - let out_dir = build.cargo_out(compiler, mode, target); - let tests = find_tests(&out_dir.join("deps"), target); +fn envify(s: &str) -> String { + s.chars().map(|c| { + match c { + '-' => '_', + c => c, + } + }).flat_map(|c| c.to_uppercase()).collect() +} - let tool = build.tool(&Compiler::new(0, &build.build), - "remote-test-client"); - for test in tests { +/// Some test suites are run inside emulators or on remote devices, and most +/// of our test binaries are linked dynamically which means we need to ship +/// the standard library and such to the emulator ahead of time. This step +/// represents this and is a dependency of all test suites. +/// +/// Most of the time this is a noop. For some steps such as shipping data to +/// QEMU we have to build our own tools so we've got conditional dependencies +/// on those programs as well. Note that the remote test client is built for +/// the build target (us) and the server is built for the target. 
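// Usage sketch for the envify helper defined above: cargo reads per-target
// runner configuration from CARGO_TARGET_<TRIPLE>_RUNNER, with the triple
// upper-cased and dashes turned into underscores, which is exactly the shape
// envify produces. The helper is repeated here so the example is standalone.
fn envify(s: &str) -> String {
    s.chars()
        .map(|c| if c == '-' { '_' } else { c })
        .flat_map(|c| c.to_uppercase())
        .collect()
}

fn main() {
    let triple = "armv7-unknown-linux-gnueabihf";
    let var = format!("CARGO_TARGET_{}_RUNNER", envify(triple));
    assert_eq!(var, "CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_RUNNER");
    // Setting this to e.g. "remote-test-client run" makes cargo launch test
    // binaries through that wrapper instead of executing them directly.
}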
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct RemoteCopyLibs { + compiler: Compiler, + target: Interned, +} + +impl Step for RemoteCopyLibs { + type Output = (); + + fn should_run(run: ShouldRun) -> ShouldRun { + run.never() + } + + fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = self.compiler; + let target = self.target; + if !build.remote_tested(target) { + return + } + + builder.ensure(compile::Test { compiler, target }); + + println!("REMOTE copy libs to emulator ({})", target); + t!(fs::create_dir_all(build.out.join("tmp"))); + + let server = builder.ensure(tool::RemoteTestServer { compiler, target }); + + // Spawn the emulator and wait for it to come online + let tool = builder.tool_exe(Tool::RemoteTestClient); let mut cmd = Command::new(&tool); - cmd.arg("run") - .arg(&test); - if build.config.quiet_tests { - cmd.arg("--quiet"); + cmd.arg("spawn-emulator") + .arg(target) + .arg(&server) + .arg(build.out.join("tmp")); + if let Some(rootfs) = build.qemu_rootfs(target) { + cmd.arg(rootfs); + } + build.run(&mut cmd); + + // Push all our dylibs to the emulator + for f in t!(builder.sysroot_libdir(compiler, target).read_dir()) { + let f = t!(f); + let name = f.file_name().into_string().unwrap(); + if util::is_dylib(&name) { + build.run(Command::new(&tool) + .arg("push") + .arg(f.path())); + } } - cmd.args(&build.flags.cmd.test_args()); - try_run(build, &mut cmd); } } -fn find_tests(dir: &Path, target: &str) -> Vec { - let mut dst = Vec::new(); - for e in t!(dir.read_dir()).map(|e| t!(e)) { - let file_type = t!(e.file_type()); - if !file_type.is_file() { - continue +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Distcheck; + +impl Step for Distcheck { + type Output = (); + const ONLY_BUILD: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("distcheck") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Distcheck); + } + + /// Run "distcheck", a 'make check' from a tarball + fn run(self, builder: &Builder) { + let build = builder.build; + + println!("Distcheck"); + let dir = build.out.join("tmp").join("distcheck"); + let _ = fs::remove_dir_all(&dir); + t!(fs::create_dir_all(&dir)); + + // Guarantee that these are built before we begin running. 
+ builder.ensure(dist::PlainSourceTarball); + builder.ensure(dist::Src); + + let mut cmd = Command::new("tar"); + cmd.arg("-xzf") + .arg(builder.ensure(dist::PlainSourceTarball)) + .arg("--strip-components=1") + .current_dir(&dir); + build.run(&mut cmd); + build.run(Command::new("./configure") + .args(&build.config.configure_args) + .arg("--enable-vendor") + .current_dir(&dir)); + build.run(Command::new(build_helper::make(&build.build)) + .arg("check") + .current_dir(&dir)); + + // Now make sure that rust-src has all of libstd's dependencies + println!("Distcheck rust-src"); + let dir = build.out.join("tmp").join("distcheck-src"); + let _ = fs::remove_dir_all(&dir); + t!(fs::create_dir_all(&dir)); + + let mut cmd = Command::new("tar"); + cmd.arg("-xzf") + .arg(builder.ensure(dist::Src)) + .arg("--strip-components=1") + .current_dir(&dir); + build.run(&mut cmd); + + let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml"); + build.run(Command::new(&build.initial_cargo) + .arg("generate-lockfile") + .arg("--manifest-path") + .arg(&toml) + .current_dir(&dir)); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Bootstrap; + +impl Step for Bootstrap { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + const ONLY_BUILD: bool = true; + + /// Test the build system itself + fn run(self, builder: &Builder) { + let build = builder.build; + let mut cmd = Command::new(&build.initial_cargo); + cmd.arg("test") + .current_dir(build.src.join("src/bootstrap")) + .env("CARGO_TARGET_DIR", build.out.join("bootstrap")) + .env("RUSTC_BOOTSTRAP", "1") + .env("RUSTC", &build.initial_rustc); + if !build.fail_fast { + cmd.arg("--no-fail-fast"); } - let filename = e.file_name().into_string().unwrap(); - if (target.contains("windows") && filename.ends_with(".exe")) || - (!target.contains("windows") && !filename.contains(".")) || - (target.contains("emscripten") && - filename.ends_with(".js") && - !filename.ends_with(".asm.js")) { - dst.push(e.path()); - } - } - dst -} - -pub fn remote_copy_libs(build: &Build, compiler: &Compiler, target: &str) { - if !build.remote_tested(target) { - return - } - - println!("REMOTE copy libs to emulator ({})", target); - t!(fs::create_dir_all(build.out.join("tmp"))); - - let server = build.cargo_out(compiler, Mode::Tool, target) - .join(exe("remote-test-server", target)); - - // Spawn the emulator and wait for it to come online - let tool = build.tool(&Compiler::new(0, &build.build), - "remote-test-client"); - let mut cmd = Command::new(&tool); - cmd.arg("spawn-emulator") - .arg(target) - .arg(&server) - .arg(build.out.join("tmp")); - if let Some(rootfs) = build.qemu_rootfs(target) { - cmd.arg(rootfs); - } - build.run(&mut cmd); - - // Push all our dylibs to the emulator - for f in t!(build.sysroot_libdir(compiler, target).read_dir()) { - let f = t!(f); - let name = f.file_name().into_string().unwrap(); - if util::is_dylib(&name) { - build.run(Command::new(&tool) - .arg("push") - .arg(f.path())); - } - } -} - -/// Run "distcheck", a 'make check' from a tarball -pub fn distcheck(build: &Build) { - if build.build != "x86_64-unknown-linux-gnu" { - return - } - if !build.config.host.iter().any(|s| s == "x86_64-unknown-linux-gnu") { - return - } - if !build.config.target.iter().any(|s| s == "x86_64-unknown-linux-gnu") { - return - } - - println!("Distcheck"); - let dir = build.out.join("tmp").join("distcheck"); - let _ = fs::remove_dir_all(&dir); - t!(fs::create_dir_all(&dir)); - - let mut cmd = Command::new("tar"); - 
cmd.arg("-xzf") - .arg(dist::rust_src_location(build)) - .arg("--strip-components=1") - .current_dir(&dir); - build.run(&mut cmd); - build.run(Command::new("./configure") - .args(&build.config.configure_args) - .arg("--enable-vendor") - .current_dir(&dir)); - build.run(Command::new(build_helper::make(&build.build)) - .arg("check") - .current_dir(&dir)); - - // Now make sure that rust-src has all of libstd's dependencies - println!("Distcheck rust-src"); - let dir = build.out.join("tmp").join("distcheck-src"); - let _ = fs::remove_dir_all(&dir); - t!(fs::create_dir_all(&dir)); - - let mut cmd = Command::new("tar"); - cmd.arg("-xzf") - .arg(dist::rust_src_installer(build)) - .arg("--strip-components=1") - .current_dir(&dir); - build.run(&mut cmd); - - let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml"); - build.run(Command::new(&build.initial_cargo) - .arg("generate-lockfile") - .arg("--manifest-path") - .arg(&toml) - .current_dir(&dir)); -} - -/// Test the build system itself -pub fn bootstrap(build: &Build) { - let mut cmd = Command::new(&build.initial_cargo); - cmd.arg("test") - .current_dir(build.src.join("src/bootstrap")) - .env("CARGO_TARGET_DIR", build.out.join("bootstrap")) - .env("RUSTC_BOOTSTRAP", "1") - .env("RUSTC", &build.initial_rustc); - if !build.fail_fast { - cmd.arg("--no-fail-fast"); + cmd.arg("--").args(&build.config.cmd.test_args()); + try_run(build, &mut cmd); + } + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/bootstrap") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Bootstrap); } - cmd.arg("--").args(&build.flags.cmd.test_args()); - try_run(build, &mut cmd); } diff --git a/src/bootstrap/clean.rs b/src/bootstrap/clean.rs index 308a0ab307..119340a019 100644 --- a/src/bootstrap/clean.rs +++ b/src/bootstrap/clean.rs @@ -26,7 +26,7 @@ pub fn clean(build: &Build) { rm_rf(&build.out.join("tmp")); rm_rf(&build.out.join("dist")); - for host in build.config.host.iter() { + for host in &build.hosts { let entries = match build.out.join(host).read_dir() { Ok(iter) => iter, Err(_) => continue, diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs index 5a3106c7d5..335e1690a2 100644 --- a/src/bootstrap/compile.rs +++ b/src/bootstrap/compile.rs @@ -23,31 +23,121 @@ use std::io::prelude::*; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::str; +use std::cmp::min; use build_helper::{output, mtime, up_to_date}; use filetime::FileTime; -use rustc_serialize::json; +use serde_json; -use channel::GitInfo; use util::{exe, libdir, is_dylib, copy}; use {Build, Compiler, Mode}; +use native; +use tool; -/// Build the standard library. +use cache::{INTERNER, Interned}; +use builder::{Step, RunConfig, ShouldRun, Builder}; + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Std { + pub target: Interned, + pub compiler: Compiler, +} + +impl Step for Std { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/libstd").krate("std") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Std { + compiler: run.builder.compiler(run.builder.top_stage, run.host), + target: run.target, + }); + } + + /// Build the standard library. + /// + /// This will build the standard library for a particular stage of the build + /// using the `compiler` targeting the `target` architecture. The artifacts + /// created will also be linked into the sysroot directory. 
+ fn run(self, builder: &Builder) { + let build = builder.build; + let target = self.target; + let compiler = self.compiler; + + builder.ensure(StartupObjects { compiler, target }); + + if build.force_use_stage1(compiler, target) { + let from = builder.compiler(1, build.build); + builder.ensure(Std { + compiler: from, + target, + }); + println!("Uplifting stage1 std ({} -> {})", from.host, target); + + // Even if we're not building std this stage, the new sysroot must + // still contain the musl startup objects. + if target.contains("musl") && !target.contains("mips") { + let libdir = builder.sysroot_libdir(compiler, target); + copy_musl_third_party_objects(build, target, &libdir); + } + + builder.ensure(StdLink { + compiler: from, + target_compiler: compiler, + target, + }); + return; + } + + let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage)); + println!("Building stage{} std artifacts ({} -> {})", compiler.stage, + &compiler.host, target); + + if target.contains("musl") && !target.contains("mips") { + let libdir = builder.sysroot_libdir(compiler, target); + copy_musl_third_party_objects(build, target, &libdir); + } + + let out_dir = build.cargo_out(compiler, Mode::Libstd, target); + build.clear_if_dirty(&out_dir, &builder.rustc(compiler)); + let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "build"); + std_cargo(build, &compiler, target, &mut cargo); + run_cargo(build, + &mut cargo, + &libstd_stamp(build, compiler, target)); + + builder.ensure(StdLink { + compiler: builder.compiler(compiler.stage, build.build), + target_compiler: compiler, + target, + }); + } +} + +/// Copies the crt(1,i,n).o startup objects /// -/// This will build the standard library for a particular stage of the build -/// using the `compiler` targeting the `target` architecture. The artifacts -/// created will also be linked into the sysroot directory. -pub fn std(build: &Build, target: &str, compiler: &Compiler) { - let libdir = build.sysroot_libdir(compiler, target); - t!(fs::create_dir_all(&libdir)); - - let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage)); - println!("Building stage{} std artifacts ({} -> {})", compiler.stage, - compiler.host, target); - - let out_dir = build.cargo_out(compiler, Mode::Libstd, target); - build.clear_if_dirty(&out_dir, &build.compiler_path(compiler)); - let mut cargo = build.cargo(compiler, Mode::Libstd, target, "build"); +/// Since musl supports fully static linking, we can cross link for it even +/// with a glibc-targeting toolchain, given we have the appropriate startup +/// files. As those shipped with glibc won't work, copy the ones provided by +/// musl so we have them on linux-gnu hosts. +fn copy_musl_third_party_objects(build: &Build, + target: Interned, + into: &Path) { + for &obj in &["crt1.o", "crti.o", "crtn.o"] { + copy(&build.musl_root(target).unwrap().join("lib").join(obj), &into.join(obj)); + } +} + +/// Configure cargo to compile the standard library, adding appropriate env vars +/// and such. 
+pub fn std_cargo(build: &Build,
+                 compiler: &Compiler,
+                 target: Interned<String>,
+                 cargo: &mut Command) {
     let mut features = build.std_features();

     if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
@@ -73,11 +163,12 @@ pub fn std(build: &Build, target: &str, compiler: &Compiler) {
         // config.toml equivalent) is used
         cargo.env("LLVM_CONFIG", build.llvm_config(target));
     }
+
     cargo.arg("--features").arg(features)
-        .arg("--manifest-path")
-        .arg(build.src.join("src/libstd/Cargo.toml"));
+         .arg("--manifest-path")
+         .arg(build.src.join("src/libstd/Cargo.toml"));

-    if let Some(target) = build.config.target_config.get(target) {
+    if let Some(target) = build.config.target_config.get(&target) {
         if let Some(ref jemalloc) = target.jemalloc {
             cargo.env("JEMALLOC_OVERRIDE", jemalloc);
         }
@@ -87,51 +178,56 @@ pub fn std(build: &Build, target: &str, compiler: &Compiler) {
             cargo.env("MUSL_ROOT", p);
         }
     }
-
-    run_cargo(build,
-              &mut cargo,
-              &libstd_stamp(build, &compiler, target));
 }

-/// Link all libstd rlibs/dylibs into the sysroot location.
-///
-/// Links those artifacts generated by `compiler` to a the `stage` compiler's
-/// sysroot for the specified `host` and `target`.
-///
-/// Note that this assumes that `compiler` has already generated the libstd
-/// libraries for `target`, and this method will find them in the relevant
-/// output directory.
-pub fn std_link(build: &Build,
-                compiler: &Compiler,
-                target_compiler: &Compiler,
-                target: &str) {
-    println!("Copying stage{} std from stage{} ({} -> {} / {})",
-             target_compiler.stage,
-             compiler.stage,
-             compiler.host,
-             target_compiler.host,
-             target);
-    let libdir = build.sysroot_libdir(target_compiler, target);
-    add_to_sysroot(&libdir, &libstd_stamp(build, compiler, target));
-
-    if target.contains("musl") && !target.contains("mips") {
-        copy_musl_third_party_objects(build, target, &libdir);
-    }
-
-    if build.config.sanitizers && compiler.stage != 0 && target == "x86_64-apple-darwin" {
-        // The sanitizers are only built in stage1 or above, so the dylibs will
-        // be missing in stage0 and causes panic. See the `std()` function above
-        // for reason why the sanitizers are not built in stage0.
-        copy_apple_sanitizer_dylibs(&build.native_dir(target), "osx", &libdir);
-    }
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+struct StdLink {
+    pub compiler: Compiler,
+    pub target_compiler: Compiler,
+    pub target: Interned<String>,
 }

-/// Copies the crt(1,i,n).o startup objects
-///
-/// Only required for musl targets that statically link to libc
-fn copy_musl_third_party_objects(build: &Build, target: &str, into: &Path) {
-    for &obj in &["crt1.o", "crti.o", "crtn.o"] {
-        copy(&build.musl_root(target).unwrap().join("lib").join(obj), &into.join(obj));
+impl Step for StdLink {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
+    }
+
+    /// Link all libstd rlibs/dylibs into the sysroot location.
+    ///
+    /// Links those artifacts generated by `compiler` to the `stage` compiler's
+    /// sysroot for the specified `host` and `target`.
+    ///
+    /// Note that this assumes that `compiler` has already generated the libstd
+    /// libraries for `target`, and this method will find them in the relevant
+    /// output directory.
+ fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = self.compiler; + let target_compiler = self.target_compiler; + let target = self.target; + println!("Copying stage{} std from stage{} ({} -> {} / {})", + target_compiler.stage, + compiler.stage, + &compiler.host, + target_compiler.host, + target); + let libdir = builder.sysroot_libdir(target_compiler, target); + add_to_sysroot(&libdir, &libstd_stamp(build, compiler, target)); + + if build.config.sanitizers && compiler.stage != 0 && target == "x86_64-apple-darwin" { + // The sanitizers are only built in stage1 or above, so the dylibs will + // be missing in stage0 and causes panic. See the `std()` function above + // for reason why the sanitizers are not built in stage0. + copy_apple_sanitizer_dylibs(&build.native_dir(target), "osx", &libdir); + } + + builder.ensure(tool::CleanTools { + compiler: target_compiler, + target, + mode: Mode::Libstd, + }); } } @@ -147,97 +243,265 @@ fn copy_apple_sanitizer_dylibs(native_dir: &Path, platform: &str, into: &Path) { } } -/// Build and prepare startup objects like rsbegin.o and rsend.o -/// -/// These are primarily used on Windows right now for linking executables/dlls. -/// They don't require any library support as they're just plain old object -/// files, so we just use the nightly snapshot compiler to always build them (as -/// no other compilers are guaranteed to be available). -pub fn build_startup_objects(build: &Build, for_compiler: &Compiler, target: &str) { - if !target.contains("pc-windows-gnu") { - return +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct StartupObjects { + pub compiler: Compiler, + pub target: Interned, +} + +impl Step for StartupObjects { + type Output = (); + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/rtstartup") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(StartupObjects { + compiler: run.builder.compiler(run.builder.top_stage, run.host), + target: run.target, + }); } - let compiler = Compiler::new(0, &build.build); - let compiler_path = build.compiler_path(&compiler); - let src_dir = &build.src.join("src/rtstartup"); - let dst_dir = &build.native_dir(target).join("rtstartup"); - let sysroot_dir = &build.sysroot_libdir(for_compiler, target); - t!(fs::create_dir_all(dst_dir)); - t!(fs::create_dir_all(sysroot_dir)); - - for file in &["rsbegin", "rsend"] { - let src_file = &src_dir.join(file.to_string() + ".rs"); - let dst_file = &dst_dir.join(file.to_string() + ".o"); - if !up_to_date(src_file, dst_file) { - let mut cmd = Command::new(&compiler_path); - build.run(cmd.env("RUSTC_BOOTSTRAP", "1") - .arg("--cfg").arg(format!("stage{}", compiler.stage)) - .arg("--target").arg(target) - .arg("--emit=obj") - .arg("--out-dir").arg(dst_dir) - .arg(src_file)); + /// Build and prepare startup objects like rsbegin.o and rsend.o + /// + /// These are primarily used on Windows right now for linking executables/dlls. + /// They don't require any library support as they're just plain old object + /// files, so we just use the nightly snapshot compiler to always build them (as + /// no other compilers are guaranteed to be available). 
+ fn run(self, builder: &Builder) { + let build = builder.build; + let for_compiler = self.compiler; + let target = self.target; + if !target.contains("pc-windows-gnu") { + return + } + + let src_dir = &build.src.join("src/rtstartup"); + let dst_dir = &build.native_dir(target).join("rtstartup"); + let sysroot_dir = &builder.sysroot_libdir(for_compiler, target); + t!(fs::create_dir_all(dst_dir)); + + for file in &["rsbegin", "rsend"] { + let src_file = &src_dir.join(file.to_string() + ".rs"); + let dst_file = &dst_dir.join(file.to_string() + ".o"); + if !up_to_date(src_file, dst_file) { + let mut cmd = Command::new(&build.initial_rustc); + build.run(cmd.env("RUSTC_BOOTSTRAP", "1") + .arg("--cfg").arg("stage0") + .arg("--target").arg(target) + .arg("--emit=obj") + .arg("-o").arg(dst_file) + .arg(src_file)); + } + + copy(dst_file, &sysroot_dir.join(file.to_string() + ".o")); } - copy(dst_file, &sysroot_dir.join(file.to_string() + ".o")); + for obj in ["crt2.o", "dllcrt2.o"].iter() { + copy(&compiler_file(build.cc(target), obj), &sysroot_dir.join(obj)); + } } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Test { + pub compiler: Compiler, + pub target: Interned, +} - for obj in ["crt2.o", "dllcrt2.o"].iter() { - copy(&compiler_file(build.cc(target), obj), &sysroot_dir.join(obj)); +impl Step for Test { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/libtest").krate("test") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Test { + compiler: run.builder.compiler(run.builder.top_stage, run.host), + target: run.target, + }); + } + + /// Build libtest. + /// + /// This will build libtest and supporting libraries for a particular stage of + /// the build using the `compiler` targeting the `target` architecture. The + /// artifacts created will also be linked into the sysroot directory. + fn run(self, builder: &Builder) { + let build = builder.build; + let target = self.target; + let compiler = self.compiler; + + builder.ensure(Std { compiler, target }); + + if build.force_use_stage1(compiler, target) { + builder.ensure(Test { + compiler: builder.compiler(1, build.build), + target, + }); + println!("Uplifting stage1 test ({} -> {})", &build.build, target); + builder.ensure(TestLink { + compiler: builder.compiler(1, build.build), + target_compiler: compiler, + target, + }); + return; + } + + let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage)); + println!("Building stage{} test artifacts ({} -> {})", compiler.stage, + &compiler.host, target); + let out_dir = build.cargo_out(compiler, Mode::Libtest, target); + build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target)); + let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "build"); + test_cargo(build, &compiler, target, &mut cargo); + run_cargo(build, + &mut cargo, + &libtest_stamp(build, compiler, target)); + + builder.ensure(TestLink { + compiler: builder.compiler(compiler.stage, build.build), + target_compiler: compiler, + target, + }); } } -/// Build libtest. -/// -/// This will build libtest and supporting libraries for a particular stage of -/// the build using the `compiler` targeting the `target` architecture. The -/// artifacts created will also be linked into the sysroot directory. 
-pub fn test(build: &Build, target: &str, compiler: &Compiler) { - let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage)); - println!("Building stage{} test artifacts ({} -> {})", compiler.stage, - compiler.host, target); - let out_dir = build.cargo_out(compiler, Mode::Libtest, target); - build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target)); - let mut cargo = build.cargo(compiler, Mode::Libtest, target, "build"); +/// Same as `std_cargo`, but for libtest +pub fn test_cargo(build: &Build, + _compiler: &Compiler, + _target: Interned, + cargo: &mut Command) { if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") { cargo.env("MACOSX_DEPLOYMENT_TARGET", target); } cargo.arg("--manifest-path") - .arg(build.src.join("src/libtest/Cargo.toml")); - run_cargo(build, - &mut cargo, - &libtest_stamp(build, compiler, target)); + .arg(build.src.join("src/libtest/Cargo.toml")); } -/// Same as `std_link`, only for libtest -pub fn test_link(build: &Build, - compiler: &Compiler, - target_compiler: &Compiler, - target: &str) { - println!("Copying stage{} test from stage{} ({} -> {} / {})", - target_compiler.stage, - compiler.stage, - compiler.host, - target_compiler.host, - target); - add_to_sysroot(&build.sysroot_libdir(target_compiler, target), - &libtest_stamp(build, compiler, target)); +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct TestLink { + pub compiler: Compiler, + pub target_compiler: Compiler, + pub target: Interned, } -/// Build the compiler. -/// -/// This will build the compiler for a particular stage of the build using -/// the `compiler` targeting the `target` architecture. The artifacts -/// created will also be linked into the sysroot directory. -pub fn rustc(build: &Build, target: &str, compiler: &Compiler) { - let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage)); - println!("Building stage{} compiler artifacts ({} -> {})", - compiler.stage, compiler.host, target); - - let out_dir = build.cargo_out(compiler, Mode::Librustc, target); - build.clear_if_dirty(&out_dir, &libtest_stamp(build, compiler, target)); - - let mut cargo = build.cargo(compiler, Mode::Librustc, target, "build"); +impl Step for TestLink { + type Output = (); + + fn should_run(run: ShouldRun) -> ShouldRun { + run.never() + } + + /// Same as `std_link`, only for libtest + fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = self.compiler; + let target_compiler = self.target_compiler; + let target = self.target; + println!("Copying stage{} test from stage{} ({} -> {} / {})", + target_compiler.stage, + compiler.stage, + &compiler.host, + target_compiler.host, + target); + add_to_sysroot(&builder.sysroot_libdir(target_compiler, target), + &libtest_stamp(build, compiler, target)); + builder.ensure(tool::CleanTools { + compiler: target_compiler, + target, + mode: Mode::Libtest, + }); + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Rustc { + pub compiler: Compiler, + pub target: Interned, +} + +impl Step for Rustc { + type Output = (); + const ONLY_HOSTS: bool = true; + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/librustc").krate("rustc-main") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Rustc { + compiler: run.builder.compiler(run.builder.top_stage, run.host), + target: run.target, + }); + } + + /// Build the compiler. 
+    ///
+    /// This will build the compiler for a particular stage of the build using
+    /// the `compiler` targeting the `target` architecture. The artifacts
+    /// created will also be linked into the sysroot directory.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target = self.target;
+
+        builder.ensure(Test { compiler, target });
+
+        // Build LLVM for our target. This will implicitly build the host LLVM
+        // if necessary.
+        builder.ensure(native::Llvm { target });
+
+        if build.force_use_stage1(compiler, target) {
+            builder.ensure(Rustc {
+                compiler: builder.compiler(1, build.build),
+                target,
+            });
+            println!("Uplifting stage1 rustc ({} -> {})", &build.build, target);
+            builder.ensure(RustcLink {
+                compiler: builder.compiler(1, build.build),
+                target_compiler: compiler,
+                target,
+            });
+            return;
+        }
+
+        // Ensure that build scripts have a std to link against.
+        builder.ensure(Std {
+            compiler: builder.compiler(self.compiler.stage, build.build),
+            target: build.build,
+        });
+
+        let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage));
+        println!("Building stage{} compiler artifacts ({} -> {})",
+                 compiler.stage, &compiler.host, target);
+
+        let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
+        build.clear_if_dirty(&out_dir, &libtest_stamp(build, compiler, target));
+
+        let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "build");
+        rustc_cargo(build, &compiler, target, &mut cargo);
+        run_cargo(build,
+                  &mut cargo,
+                  &librustc_stamp(build, compiler, target));
+
+        builder.ensure(RustcLink {
+            compiler: builder.compiler(compiler.stage, build.build),
+            target_compiler: compiler,
+            target,
+        });
+    }
+}
+
+/// Same as `std_cargo`, but for librustc
+pub fn rustc_cargo(build: &Build,
+                   compiler: &Compiler,
+                   target: Interned<String>,
+                   cargo: &mut Command) {
     cargo.arg("--features").arg(build.rustc_features())
          .arg("--manifest-path")
          .arg(build.src.join("src/rustc/Cargo.toml"));
@@ -252,7 +516,8 @@ pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
     if compiler.stage == 0 {
         cargo.env("CFG_LIBDIR_RELATIVE", "lib");
     } else {
-        let libdir_relative = build.config.libdir_relative.clone().unwrap_or(PathBuf::from("lib"));
+        let libdir_relative =
+            build.config.libdir_relative.clone().unwrap_or(PathBuf::from("lib"));
         cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative);
     }

@@ -277,7 +542,7 @@ pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
         cargo.env("LLVM_RUSTLLVM", "1");
     }
     cargo.env("LLVM_CONFIG", build.llvm_config(target));
-    let target_config = build.config.target_config.get(target);
+    let target_config = build.config.target_config.get(&target);
     if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
         cargo.env("CFG_LLVM_ROOT", s);
     }
@@ -298,41 +563,59 @@ pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
     if let Some(ref s) = build.config.rustc_default_ar {
         cargo.env("CFG_DEFAULT_AR", s);
     }
-    run_cargo(build,
-              &mut cargo,
-              &librustc_stamp(build, compiler, target));
 }

-/// Same as `std_link`, only for librustc
-pub fn rustc_link(build: &Build,
-                  compiler: &Compiler,
-                  target_compiler: &Compiler,
-                  target: &str) {
-    println!("Copying stage{} rustc from stage{} ({} -> {} / {})",
-             target_compiler.stage,
-             compiler.stage,
-             compiler.host,
-             target_compiler.host,
-             target);
-    add_to_sysroot(&build.sysroot_libdir(target_compiler, target),
-                   &librustc_stamp(build, compiler, target));
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+struct RustcLink {
+    pub compiler: Compiler,
+    pub target_compiler: Compiler,
+    pub target: Interned<String>,
+}
+
+impl Step for RustcLink {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
+    }
+
+    /// Same as `std_link`, only for librustc
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target_compiler = self.target_compiler;
+        let target = self.target;
+        println!("Copying stage{} rustc from stage{} ({} -> {} / {})",
+                 target_compiler.stage,
+                 compiler.stage,
+                 &compiler.host,
+                 target_compiler.host,
+                 target);
+        add_to_sysroot(&builder.sysroot_libdir(target_compiler, target),
+                       &librustc_stamp(build, compiler, target));
+        builder.ensure(tool::CleanTools {
+            compiler: target_compiler,
+            target,
+            mode: Mode::Librustc,
+        });
+    }
 }

 /// Cargo's output path for the standard library in a given stage, compiled
 /// by a particular compiler for the specified target.
-fn libstd_stamp(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
+pub fn libstd_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
     build.cargo_out(compiler, Mode::Libstd, target).join(".libstd.stamp")
 }

 /// Cargo's output path for libtest in a given stage, compiled by a particular
 /// compiler for the specified target.
-fn libtest_stamp(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
+pub fn libtest_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
     build.cargo_out(compiler, Mode::Libtest, target).join(".libtest.stamp")
 }

 /// Cargo's output path for librustc in a given stage, compiled by a particular
 /// compiler for the specified target.
-fn librustc_stamp(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
+pub fn librustc_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
     build.cargo_out(compiler, Mode::Librustc, target).join(".librustc.stamp")
 }

@@ -342,60 +625,132 @@ fn compiler_file(compiler: &Path, file: &str) -> PathBuf {
     PathBuf::from(out.trim())
 }

-pub fn create_sysroot(build: &Build, compiler: &Compiler) {
-    let sysroot = build.sysroot(compiler);
-    let _ = fs::remove_dir_all(&sysroot);
-    t!(fs::create_dir_all(&sysroot));
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Sysroot {
+    pub compiler: Compiler,
 }

-/// Prepare a new compiler from the artifacts in `stage`
-///
-/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
-/// must have been previously produced by the `stage - 1` build.build
-/// compiler.
-pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
-    // nothing to do in stage0
-    if stage == 0 {
-        return
+impl Step for Sysroot {
+    type Output = Interned<PathBuf>;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
+    }
+
+    /// Returns the sysroot for the `compiler` specified that *this build system
+    /// generates*.
+    ///
+    /// That is, the sysroot for the stage0 compiler is not what the compiler
+    /// thinks it is by default, but it's the same as the default for stages
+    /// 1-3.
+ fn run(self, builder: &Builder) -> Interned { + let build = builder.build; + let compiler = self.compiler; + let sysroot = if compiler.stage == 0 { + build.out.join(&compiler.host).join("stage0-sysroot") + } else { + build.out.join(&compiler.host).join(format!("stage{}", compiler.stage)) + }; + let _ = fs::remove_dir_all(&sysroot); + t!(fs::create_dir_all(&sysroot)); + INTERNER.intern_path(sysroot) } +} - println!("Copying stage{} compiler ({})", stage, host); +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct Assemble { + /// The compiler which we will produce in this step. Assemble itself will + /// take care of ensuring that the necessary prerequisites to do so exist, + /// that is, this target can be a stage2 compiler and Assemble will build + /// previous stages for you. + pub target_compiler: Compiler, +} - // The compiler that we're assembling - let target_compiler = Compiler::new(stage, host); +impl Step for Assemble { + type Output = Compiler; - // The compiler that compiled the compiler we're assembling - let build_compiler = Compiler::new(stage - 1, &build.build); + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/rustc") + } - // Link in all dylibs to the libdir - let sysroot = build.sysroot(&target_compiler); - let sysroot_libdir = sysroot.join(libdir(host)); - t!(fs::create_dir_all(&sysroot_libdir)); - let src_libdir = build.sysroot_libdir(&build_compiler, host); - for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) { - let filename = f.file_name().into_string().unwrap(); - if is_dylib(&filename) { - copy(&f.path(), &sysroot_libdir.join(&filename)); + /// Prepare a new compiler from the artifacts in `stage` + /// + /// This will assemble a compiler in `build/$host/stage$stage`. The compiler + /// must have been previously produced by the `stage - 1` build.build + /// compiler. + fn run(self, builder: &Builder) -> Compiler { + let build = builder.build; + let target_compiler = self.target_compiler; + + if target_compiler.stage == 0 { + assert_eq!(build.build, target_compiler.host, + "Cannot obtain compiler for non-native build triple at stage 0"); + // The stage 0 compiler for the build triple is always pre-built. + return target_compiler; } - } - let out_dir = build.cargo_out(&build_compiler, Mode::Librustc, host); + // Get the compiler that we'll use to bootstrap ourselves. + let build_compiler = if target_compiler.host != build.build { + // Build a compiler for the host platform. We cannot use the stage0 + // compiler for the host platform for this because it doesn't have + // the libraries we need. FIXME: Perhaps we should download those + // libraries? It would make builds faster... + // FIXME: It may be faster if we build just a stage 1 + // compiler and then use that to bootstrap this compiler + // forward. + builder.compiler(target_compiler.stage - 1, build.build) + } else { + // Build the compiler we'll use to build the stage requested. This + // may build more than one compiler (going down to stage 0). + builder.compiler(target_compiler.stage - 1, target_compiler.host) + }; - // Link the compiler binary itself into place - let rustc = out_dir.join(exe("rustc", host)); - let bindir = sysroot.join("bin"); - t!(fs::create_dir_all(&bindir)); - let compiler = build.compiler_path(&target_compiler); - let _ = fs::remove_file(&compiler); - copy(&rustc, &compiler); + // Build the libraries for this compiler to link to (i.e., the libraries + // it uses at runtime). NOTE: Crates the target compiler compiles don't + // link to these. 
(FIXME: Is that correct? It seems to be correct most + // of the time but I think we do link to these for stage2/bin compilers + // when not performing a full bootstrap). + if builder.build.config.keep_stage.map_or(false, |s| target_compiler.stage <= s) { + builder.verbose("skipping compilation of compiler due to --keep-stage"); + let compiler = build_compiler; + for stage in 0..min(target_compiler.stage, builder.config.keep_stage.unwrap()) { + let target_compiler = builder.compiler(stage, target_compiler.host); + let target = target_compiler.host; + builder.ensure(StdLink { compiler, target_compiler, target }); + builder.ensure(TestLink { compiler, target_compiler, target }); + builder.ensure(RustcLink { compiler, target_compiler, target }); + } + } else { + builder.ensure(Rustc { compiler: build_compiler, target: target_compiler.host }); + } + + let stage = target_compiler.stage; + let host = target_compiler.host; + println!("Assembling stage{} compiler ({})", stage, host); + + // Link in all dylibs to the libdir + let sysroot = builder.sysroot(target_compiler); + let sysroot_libdir = sysroot.join(libdir(&*host)); + t!(fs::create_dir_all(&sysroot_libdir)); + let src_libdir = builder.sysroot_libdir(build_compiler, host); + for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) { + let filename = f.file_name().into_string().unwrap(); + if is_dylib(&filename) { + copy(&f.path(), &sysroot_libdir.join(&filename)); + } + } - // See if rustdoc exists to link it into place - let rustdoc = exe("rustdoc", host); - let rustdoc_src = out_dir.join(&rustdoc); - let rustdoc_dst = bindir.join(&rustdoc); - if fs::metadata(&rustdoc_src).is_ok() { - let _ = fs::remove_file(&rustdoc_dst); - copy(&rustdoc_src, &rustdoc_dst); + let out_dir = build.cargo_out(build_compiler, Mode::Librustc, host); + + // Link the compiler binary itself into place + let rustc = out_dir.join(exe("rustc", &*host)); + let bindir = sysroot.join("bin"); + t!(fs::create_dir_all(&bindir)); + let compiler = builder.rustc(target_compiler); + let _ = fs::remove_file(&compiler); + copy(&rustc, &compiler); + + target_compiler } } @@ -418,64 +773,6 @@ fn add_to_sysroot(sysroot_dst: &Path, stamp: &Path) { } } -/// Build a tool in `src/tools` -/// -/// This will build the specified tool with the specified `host` compiler in -/// `stage` into the normal cargo output directory. -pub fn maybe_clean_tools(build: &Build, stage: u32, target: &str, mode: Mode) { - let compiler = Compiler::new(stage, &build.build); - - let stamp = match mode { - Mode::Libstd => libstd_stamp(build, &compiler, target), - Mode::Libtest => libtest_stamp(build, &compiler, target), - Mode::Librustc => librustc_stamp(build, &compiler, target), - _ => panic!(), - }; - let out_dir = build.cargo_out(&compiler, Mode::Tool, target); - build.clear_if_dirty(&out_dir, &stamp); -} - -/// Build a tool in `src/tools` -/// -/// This will build the specified tool with the specified `host` compiler in -/// `stage` into the normal cargo output directory. 
-pub fn tool(build: &Build, stage: u32, target: &str, tool: &str) { - let _folder = build.fold_output(|| format!("stage{}-{}", stage, tool)); - println!("Building stage{} tool {} ({})", stage, tool, target); - - let compiler = Compiler::new(stage, &build.build); - - let mut cargo = build.cargo(&compiler, Mode::Tool, target, "build"); - let dir = build.src.join("src/tools").join(tool); - cargo.arg("--manifest-path").arg(dir.join("Cargo.toml")); - - // We don't want to build tools dynamically as they'll be running across - // stages and such and it's just easier if they're not dynamically linked. - cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); - - if let Some(dir) = build.openssl_install_dir(target) { - cargo.env("OPENSSL_STATIC", "1"); - cargo.env("OPENSSL_DIR", dir); - cargo.env("LIBZ_SYS_STATIC", "1"); - } - - cargo.env("CFG_RELEASE_CHANNEL", &build.config.channel); - - let info = GitInfo::new(&dir); - if let Some(sha) = info.sha() { - cargo.env("CFG_COMMIT_HASH", sha); - } - if let Some(sha_short) = info.sha_short() { - cargo.env("CFG_SHORT_COMMIT_HASH", sha_short); - } - if let Some(date) = info.commit_date() { - cargo.env("CFG_COMMIT_DATE", date); - } - - build.run(&mut cargo); -} - - // Avoiding a dependency on winapi to keep compile times down #[cfg(unix)] fn stderr_isatty() -> bool { @@ -535,18 +832,18 @@ fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path) { let stdout = BufReader::new(child.stdout.take().unwrap()); for line in stdout.lines() { let line = t!(line); - let json = if line.starts_with("{") { - t!(line.parse::()) + let json: serde_json::Value = if line.starts_with("{") { + t!(serde_json::from_str(&line)) } else { // If this was informational, just print it out and continue println!("{}", line); continue }; - if json.find("reason").and_then(|j| j.as_string()) != Some("compiler-artifact") { + if json["reason"].as_str() != Some("compiler-artifact") { continue } for filename in json["filenames"].as_array().unwrap() { - let filename = filename.as_string().unwrap(); + let filename = filename.as_str().unwrap(); // Skip files like executables if !filename.ends_with(".rlib") && !filename.ends_with(".lib") && diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs index 34628852ab..f43035fbfe 100644 --- a/src/bootstrap/config.rs +++ b/src/bootstrap/config.rs @@ -19,11 +19,14 @@ use std::fs::{self, File}; use std::io::prelude::*; use std::path::PathBuf; use std::process; +use std::cmp; use num_cpus; -use rustc_serialize::Decodable; -use toml::{Parser, Decoder, Value}; +use toml; use util::{exe, push_exe_path}; +use cache::{INTERNER, Interned}; +use flags::Flags; +pub use flags::Subcommand; /// Global configuration for the entire build and/or bootstrap. /// @@ -35,7 +38,7 @@ use util::{exe, push_exe_path}; /// Note that this structure is not decoded directly into, but rather it is /// filled out from the decoded forms of the structs below. For documentation /// each field, see the corresponding fields in -/// `src/bootstrap/config.toml.example`. +/// `config.toml.example`. 
#[derive(Default)] pub struct Config { pub ccache: Option, @@ -46,13 +49,25 @@ pub struct Config { pub docs: bool, pub locked_deps: bool, pub vendor: bool, - pub target_config: HashMap, + pub target_config: HashMap, Target>, pub full_bootstrap: bool, pub extended: bool, pub sanitizers: bool, pub profiler: bool, + pub ignore_git: bool, + + pub run_host_only: bool, + + pub on_fail: Option, + pub stage: Option, + pub keep_stage: Option, + pub src: PathBuf, + pub jobs: Option, + pub cmd: Subcommand, + pub incremental: bool, // llvm codegen options + pub llvm_enabled: bool, pub llvm_assertions: bool, pub llvm_optimize: bool, pub llvm_release_debuginfo: bool, @@ -62,7 +77,6 @@ pub struct Config { pub llvm_targets: Option, pub llvm_experimental_targets: Option, pub llvm_link_jobs: Option, - pub llvm_clean_rebuild: bool, // rust codegen options pub rust_optimize: bool, @@ -78,9 +92,9 @@ pub struct Config { pub rust_debuginfo_tests: bool, pub rust_dist_src: bool, - pub build: String, - pub host: Vec, - pub target: Vec, + pub build: Interned, + pub hosts: Vec>, + pub targets: Vec>, pub local_rebuild: bool, // dist misc @@ -129,6 +143,7 @@ pub struct Target { pub cc: Option, pub cxx: Option, pub ndk: Option, + pub crt_static: Option, pub musl_root: Option, pub qemu_rootfs: Option, } @@ -138,7 +153,8 @@ pub struct Target { /// This structure uses `Decodable` to automatically decode a TOML configuration /// file into this format, and then this is traversed and written into the above /// `Config` structure. -#[derive(RustcDecodable, Default)] +#[derive(Deserialize, Default)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] struct TomlConfig { build: Option, install: Option, @@ -149,10 +165,13 @@ struct TomlConfig { } /// TOML representation of various global build decisions. -#[derive(RustcDecodable, Default, Clone)] +#[derive(Deserialize, Default, Clone)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] struct Build { build: Option, + #[serde(default)] host: Vec, + #[serde(default)] target: Vec, cargo: Option, rustc: Option, @@ -174,7 +193,8 @@ struct Build { } /// TOML representation of various global install decisions. -#[derive(RustcDecodable, Default, Clone)] +#[derive(Deserialize, Default, Clone)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] struct Install { prefix: Option, sysconfdir: Option, @@ -185,8 +205,10 @@ struct Install { } /// TOML representation of how the LLVM build is configured. -#[derive(RustcDecodable, Default)] +#[derive(Deserialize, Default)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] struct Llvm { + enabled: Option, ccache: Option, ninja: Option, assertions: Option, @@ -197,10 +219,10 @@ struct Llvm { targets: Option, experimental_targets: Option, link_jobs: Option, - clean_rebuild: Option, } -#[derive(RustcDecodable, Default, Clone)] +#[derive(Deserialize, Default, Clone)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] struct Dist { sign_folder: Option, gpg_password_file: Option, @@ -208,7 +230,8 @@ struct Dist { src_tarball: Option, } -#[derive(RustcDecodable)] +#[derive(Deserialize)] +#[serde(untagged)] enum StringOrBool { String(String), Bool(bool), @@ -221,7 +244,8 @@ impl Default for StringOrBool { } /// TOML representation of how the Rust build is configured. 
-#[derive(RustcDecodable, Default)] +#[derive(Deserialize, Default)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] struct Rust { optimize: Option, codegen_units: Option, @@ -240,23 +264,29 @@ struct Rust { optimize_tests: Option, debuginfo_tests: Option, codegen_tests: Option, + ignore_git: Option, } /// TOML representation of how each build target is configured. -#[derive(RustcDecodable, Default)] +#[derive(Deserialize, Default)] +#[serde(deny_unknown_fields, rename_all = "kebab-case")] struct TomlTarget { llvm_config: Option, jemalloc: Option, cc: Option, cxx: Option, android_ndk: Option, + crt_static: Option, musl_root: Option, qemu_rootfs: Option, } impl Config { - pub fn parse(build: &str, file: Option) -> Config { + pub fn parse(args: &[String]) -> Config { + let flags = Flags::parse(&args); + let file = flags.config.clone(); let mut config = Config::default(); + config.llvm_enabled = true; config.llvm_optimize = true; config.use_jemalloc = true; config.backtrace = true; @@ -266,52 +296,69 @@ impl Config { config.docs = true; config.rust_rpath = true; config.rust_codegen_units = 1; - config.build = build.to_string(); config.channel = "dev".to_string(); config.codegen_tests = true; + config.ignore_git = false; config.rust_dist_src = true; + config.on_fail = flags.on_fail; + config.stage = flags.stage; + config.src = flags.src; + config.jobs = flags.jobs; + config.cmd = flags.cmd; + config.incremental = flags.incremental; + config.keep_stage = flags.keep_stage; + + // If --target was specified but --host wasn't specified, don't run any host-only tests. + config.run_host_only = flags.host.is_empty() && !flags.target.is_empty(); + let toml = file.map(|file| { let mut f = t!(File::open(&file)); - let mut toml = String::new(); - t!(f.read_to_string(&mut toml)); - let mut p = Parser::new(&toml); - let table = match p.parse() { - Some(table) => table, - None => { - println!("failed to parse TOML configuration '{}':", file.to_str().unwrap()); - for err in p.errors.iter() { - let (loline, locol) = p.to_linecol(err.lo); - let (hiline, hicol) = p.to_linecol(err.hi); - println!("{}:{}-{}:{}: {}", loline, locol, hiline, - hicol, err.desc); - } - process::exit(2); - } - }; - let mut d = Decoder::new(Value::Table(table)); - match Decodable::decode(&mut d) { - Ok(cfg) => cfg, - Err(e) => { - println!("failed to decode TOML: {}", e); + let mut contents = String::new(); + t!(f.read_to_string(&mut contents)); + match toml::from_str(&contents) { + Ok(table) => table, + Err(err) => { + println!("failed to parse TOML configuration '{}': {}", + file.display(), err); process::exit(2); } } }).unwrap_or_else(|| TomlConfig::default()); let build = toml.build.clone().unwrap_or(Build::default()); - set(&mut config.build, build.build.clone()); - config.host.push(config.build.clone()); + set(&mut config.build, build.build.clone().map(|x| INTERNER.intern_string(x))); + set(&mut config.build, flags.build); + if config.build.is_empty() { + // set by bootstrap.py + config.build = INTERNER.intern_str(&env::var("BUILD").unwrap()); + } + config.hosts.push(config.build.clone()); for host in build.host.iter() { - if !config.host.contains(host) { - config.host.push(host.clone()); + let host = INTERNER.intern_str(host); + if !config.hosts.contains(&host) { + config.hosts.push(host); } } - for target in config.host.iter().chain(&build.target) { - if !config.target.contains(target) { - config.target.push(target.clone()); + for target in config.hosts.iter().cloned() + .chain(build.target.iter().map(|s| 
INTERNER.intern_str(s))) + { + if !config.targets.contains(&target) { + config.targets.push(target); } } + config.hosts = if !flags.host.is_empty() { + flags.host + } else { + config.hosts + }; + config.targets = if !flags.target.is_empty() { + flags.target + } else { + config.targets + }; + + config.nodejs = build.nodejs.map(PathBuf::from); config.gdb = build.gdb.map(PathBuf::from); config.python = build.python.map(PathBuf::from); @@ -327,6 +374,7 @@ impl Config { set(&mut config.sanitizers, build.sanitizers); set(&mut config.profiler, build.profiler); set(&mut config.openssl_static, build.openssl_static); + config.verbose = cmp::max(config.verbose, flags.verbose); if let Some(ref install) = toml.install { config.prefix = install.prefix.clone().map(PathBuf::from); @@ -348,12 +396,12 @@ impl Config { Some(StringOrBool::Bool(false)) | None => {} } set(&mut config.ninja, llvm.ninja); + set(&mut config.llvm_enabled, llvm.enabled); set(&mut config.llvm_assertions, llvm.assertions); set(&mut config.llvm_optimize, llvm.optimize); set(&mut config.llvm_release_debuginfo, llvm.release_debuginfo); set(&mut config.llvm_version_check, llvm.version_check); set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp); - set(&mut config.llvm_clean_rebuild, llvm.clean_rebuild); config.llvm_targets = llvm.targets.clone(); config.llvm_experimental_targets = llvm.experimental_targets.clone(); config.llvm_link_jobs = llvm.link_jobs; @@ -373,6 +421,7 @@ impl Config { set(&mut config.use_jemalloc, rust.use_jemalloc); set(&mut config.backtrace, rust.backtrace); set(&mut config.channel, rust.channel.clone()); + set(&mut config.ignore_git, rust.ignore_git); config.rustc_default_linker = rust.default_linker.clone(); config.rustc_default_ar = rust.default_ar.clone(); config.musl_root = rust.musl_root.clone().map(PathBuf::from); @@ -399,10 +448,11 @@ impl Config { } target.cxx = cfg.cxx.clone().map(PathBuf::from); target.cc = cfg.cc.clone().map(PathBuf::from); + target.crt_static = cfg.crt_static.clone(); target.musl_root = cfg.musl_root.clone().map(PathBuf::from); target.qemu_rootfs = cfg.qemu_rootfs.clone().map(PathBuf::from); - config.target_config.insert(triple.clone(), target); + config.target_config.insert(INTERNER.intern_string(triple.clone()), target); } } @@ -478,7 +528,6 @@ impl Config { ("LLVM_VERSION_CHECK", self.llvm_version_check), ("LLVM_STATIC_STDCPP", self.llvm_static_stdcpp), ("LLVM_LINK_SHARED", self.llvm_link_shared), - ("LLVM_CLEAN_REBUILD", self.llvm_clean_rebuild), ("OPTIMIZE", self.rust_optimize), ("DEBUG_ASSERTIONS", self.rust_debug_assertions), ("DEBUGINFO", self.rust_debuginfo), @@ -504,13 +553,13 @@ impl Config { } match key { - "CFG_BUILD" if value.len() > 0 => self.build = value.to_string(), + "CFG_BUILD" if value.len() > 0 => self.build = INTERNER.intern_str(value), "CFG_HOST" if value.len() > 0 => { - self.host.extend(value.split(" ").map(|s| s.to_string())); + self.hosts.extend(value.split(" ").map(|s| INTERNER.intern_str(s))); } "CFG_TARGET" if value.len() > 0 => { - self.target.extend(value.split(" ").map(|s| s.to_string())); + self.targets.extend(value.split(" ").map(|s| INTERNER.intern_str(s))); } "CFG_EXPERIMENTAL_TARGETS" if value.len() > 0 => { self.llvm_experimental_targets = Some(value.to_string()); @@ -519,33 +568,28 @@ impl Config { self.musl_root = Some(parse_configure_path(value)); } "CFG_MUSL_ROOT_X86_64" if value.len() > 0 => { - let target = "x86_64-unknown-linux-musl".to_string(); - let target = self.target_config.entry(target) - .or_insert(Target::default()); + let 
target = INTERNER.intern_str("x86_64-unknown-linux-musl"); + let target = self.target_config.entry(target).or_insert(Target::default()); target.musl_root = Some(parse_configure_path(value)); } "CFG_MUSL_ROOT_I686" if value.len() > 0 => { - let target = "i686-unknown-linux-musl".to_string(); - let target = self.target_config.entry(target) - .or_insert(Target::default()); + let target = INTERNER.intern_str("i686-unknown-linux-musl"); + let target = self.target_config.entry(target).or_insert(Target::default()); target.musl_root = Some(parse_configure_path(value)); } "CFG_MUSL_ROOT_ARM" if value.len() > 0 => { - let target = "arm-unknown-linux-musleabi".to_string(); - let target = self.target_config.entry(target) - .or_insert(Target::default()); + let target = INTERNER.intern_str("arm-unknown-linux-musleabi"); + let target = self.target_config.entry(target).or_insert(Target::default()); target.musl_root = Some(parse_configure_path(value)); } "CFG_MUSL_ROOT_ARMHF" if value.len() > 0 => { - let target = "arm-unknown-linux-musleabihf".to_string(); - let target = self.target_config.entry(target) - .or_insert(Target::default()); + let target = INTERNER.intern_str("arm-unknown-linux-musleabihf"); + let target = self.target_config.entry(target).or_insert(Target::default()); target.musl_root = Some(parse_configure_path(value)); } "CFG_MUSL_ROOT_ARMV7" if value.len() > 0 => { - let target = "armv7-unknown-linux-musleabihf".to_string(); - let target = self.target_config.entry(target) - .or_insert(Target::default()); + let target = INTERNER.intern_str("armv7-unknown-linux-musleabihf"); + let target = self.target_config.entry(target).or_insert(Target::default()); target.musl_root = Some(parse_configure_path(value)); } "CFG_DEFAULT_AR" if value.len() > 0 => { @@ -593,33 +637,28 @@ impl Config { target.jemalloc = Some(parse_configure_path(value).join("libjemalloc_pic.a")); } "CFG_ARM_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => { - let target = "arm-linux-androideabi".to_string(); - let target = self.target_config.entry(target) - .or_insert(Target::default()); + let target = INTERNER.intern_str("arm-linux-androideabi"); + let target = self.target_config.entry(target).or_insert(Target::default()); target.ndk = Some(parse_configure_path(value)); } "CFG_ARMV7_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => { - let target = "armv7-linux-androideabi".to_string(); - let target = self.target_config.entry(target) - .or_insert(Target::default()); + let target = INTERNER.intern_str("armv7-linux-androideabi"); + let target = self.target_config.entry(target).or_insert(Target::default()); target.ndk = Some(parse_configure_path(value)); } "CFG_I686_LINUX_ANDROID_NDK" if value.len() > 0 => { - let target = "i686-linux-android".to_string(); - let target = self.target_config.entry(target) - .or_insert(Target::default()); + let target = INTERNER.intern_str("i686-linux-android"); + let target = self.target_config.entry(target).or_insert(Target::default()); target.ndk = Some(parse_configure_path(value)); } "CFG_AARCH64_LINUX_ANDROID_NDK" if value.len() > 0 => { - let target = "aarch64-linux-android".to_string(); - let target = self.target_config.entry(target) - .or_insert(Target::default()); + let target = INTERNER.intern_str("aarch64-linux-android"); + let target = self.target_config.entry(target).or_insert(Target::default()); target.ndk = Some(parse_configure_path(value)); } "CFG_X86_64_LINUX_ANDROID_NDK" if value.len() > 0 => { - let target = "x86_64-linux-android".to_string(); - let target = self.target_config.entry(target) - 
.or_insert(Target::default()); + let target = INTERNER.intern_str("x86_64-linux-android"); + let target = self.target_config.entry(target).or_insert(Target::default()); target.ndk = Some(parse_configure_path(value)); } "CFG_LOCAL_RUST_ROOT" if value.len() > 0 => { @@ -643,9 +682,13 @@ impl Config { .collect(); } "CFG_QEMU_ARMHF_ROOTFS" if value.len() > 0 => { - let target = "arm-unknown-linux-gnueabihf".to_string(); - let target = self.target_config.entry(target) - .or_insert(Target::default()); + let target = INTERNER.intern_str("arm-unknown-linux-gnueabihf"); + let target = self.target_config.entry(target).or_insert(Target::default()); + target.qemu_rootfs = Some(parse_configure_path(value)); + } + "CFG_QEMU_AARCH64_ROOTFS" if value.len() > 0 => { + let target = INTERNER.intern_str("aarch64-unknown-linux-gnu"); + let target = self.target_config.entry(target).or_insert(Target::default()); target.qemu_rootfs = Some(parse_configure_path(value)); } _ => {} diff --git a/src/bootstrap/config.toml.example b/src/bootstrap/config.toml.example deleted file mode 100644 index 7a52222e46..0000000000 --- a/src/bootstrap/config.toml.example +++ /dev/null @@ -1,333 +0,0 @@ -# Sample TOML configuration file for building Rust. -# -# To configure rustbuild, copy this file to the directory from which you will be -# running the build, and name it config.toml. -# -# All options are commented out by default in this file, and they're commented -# out with their default values. The build system by default looks for -# `config.toml` in the current directory of a build for build configuration, but -# a custom configuration file can also be specified with `--config` to the build -# system. - -# ============================================================================= -# Tweaking how LLVM is compiled -# ============================================================================= -[llvm] - -# Indicates whether the LLVM build is a Release or Debug build -#optimize = true - -# Indicates whether an LLVM Release build should include debug info -#release-debuginfo = false - -# Indicates whether the LLVM assertions are enabled or not -#assertions = false - -# Indicates whether ccache is used when building LLVM -#ccache = false -# or alternatively ... -#ccache = "/path/to/ccache" - -# If an external LLVM root is specified, we automatically check the version by -# default to make sure it's within the range that we're expecting, but setting -# this flag will indicate that this version check should not be done. -#version-check = false - -# Link libstdc++ statically into the librustc_llvm instead of relying on a -# dynamic version to be available. -#static-libstdcpp = false - -# Tell the LLVM build system to use Ninja instead of the platform default for -# the generated build system. This can sometimes be faster than make, for -# example. -#ninja = false - -# LLVM targets to build support for. -# Note: this is NOT related to Rust compilation targets. However, as Rust is -# dependent on LLVM for code generation, turning targets off here WILL lead to -# the resulting rustc being unable to compile for the disabled architectures. -# Also worth pointing out is that, in case support for new targets are added to -# LLVM, enabling them here doesn't mean Rust is automatically gaining said -# support. You'll need to write a target specification at least, and most -# likely, teach rustc about the C ABI of the target. Get in touch with the -# Rust team and file an issue if you need assistance in porting! 
-#targets = "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX;Hexagon" - -# LLVM experimental targets to build support for. These targets are specified in -# the same format as above, but since these targets are experimental, they are -# not built by default and the experimental Rust compilation targets that depend -# on them will not work unless the user opts in to building them. Possible -# experimental LLVM targets include WebAssembly for the -# wasm32-experimental-emscripten Rust target. -#experimental-targets = "" - -# Cap the number of parallel linker invocations when compiling LLVM. -# This can be useful when building LLVM with debug info, which significantly -# increases the size of binaries and consequently the memory required by -# each linker process. -# If absent or 0, linker invocations are treated like any other job and -# controlled by rustbuild's -j parameter. -#link-jobs = 0 - -# Delete LLVM build directory on LLVM rebuild. -# This option defaults to `false` for local development, but CI may want to -# always perform clean full builds (possibly accelerated by (s)ccache). -#clean-rebuild = false - -# ============================================================================= -# General build configuration options -# ============================================================================= -[build] - -# Build triple for the original snapshot compiler. This must be a compiler that -# nightlies are already produced for. The current platform must be able to run -# binaries of this build triple and the nightly will be used to bootstrap the -# first compiler. -#build = "x86_64-unknown-linux-gnu" # defaults to your host platform - -# In addition to the build triple, other triples to produce full compiler -# toolchains for. Each of these triples will be bootstrapped from the build -# triple and then will continue to bootstrap themselves. This platform must -# currently be able to run all of the triples provided here. -#host = ["x86_64-unknown-linux-gnu"] # defaults to just the build triple - -# In addition to all host triples, other triples to produce the standard library -# for. Each host triple will be used to produce a copy of the standard library -# for each target triple. -#target = ["x86_64-unknown-linux-gnu"] # defaults to just the build triple - -# Instead of downloading the src/stage0.txt version of Cargo specified, use -# this Cargo binary instead to build all Rust code -#cargo = "/path/to/bin/cargo" - -# Instead of downloading the src/stage0.txt version of the compiler -# specified, use this rustc binary instead as the stage0 snapshot compiler. -#rustc = "/path/to/bin/rustc" - -# Flag to specify whether any documentation is built. If false, rustdoc and -# friends will still be compiled but they will not be used to generate any -# documentation. -#docs = true - -# Indicate whether the compiler should be documented in addition to the standard -# library and facade crates. -#compiler-docs = false - -# Indicate whether submodules are managed and updated automatically. -#submodules = true - -# The path to (or name of) the GDB executable to use. This is only used for -# executing the debuginfo test suite. -#gdb = "gdb" - -# The node.js executable to use. Note that this is only used for the emscripten -# target when running tests, otherwise this can be omitted. -#nodejs = "node" - -# Python interpreter to use for various tasks throughout the build, notably -# rustdoc tests, the lldb python interpreter, and some dist bits and pieces. 
-# Note that Python 2 is currently required. -#python = "python2.7" - -# Force Cargo to check that Cargo.lock describes the precise dependency -# set that all the Cargo.toml files create, instead of updating it. -#locked-deps = false - -# Indicate whether the vendored sources are used for Rust dependencies or not -#vendor = false - -# Typically the build system will build the rust compiler twice. The second -# compiler, however, will simply use its own libraries to link against. If you -# would rather to perform a full bootstrap, compiling the compiler three times, -# then you can set this option to true. You shouldn't ever need to set this -# option to true. -#full-bootstrap = false - -# Enable a build of the and extended rust tool set which is not only the -# compiler but also tools such as Cargo. This will also produce "combined -# installers" which are used to install Rust and Cargo together. This is -# disabled by default. -#extended = false - -# Verbosity level: 0 == not verbose, 1 == verbose, 2 == very verbose -#verbose = 0 - -# Build the sanitizer runtimes -#sanitizers = false - -# Build the profiler runtime -#profiler = false - -# Indicates whether the OpenSSL linked into Cargo will be statically linked or -# not. If static linkage is specified then the build system will download a -# known-good version of OpenSSL, compile it, and link it to Cargo. -#openssl-static = false - -# Run the build with low priority, by setting the process group's "nice" value -# to +10 on Unix platforms, and by using a "low priority" job object on Windows. -#low-priority = false - -# ============================================================================= -# General install configuration options -# ============================================================================= -[install] - -# Instead of installing to /usr/local, install to this path instead. -#prefix = "/usr/local" - -# Where to install system configuration files -# If this is a relative path, it will get installed in `prefix` above -#sysconfdir = "/etc" - -# Where to install documentation in `prefix` above -#docdir = "share/doc/rust" - -# Where to install binaries in `prefix` above -#bindir = "bin" - -# Where to install libraries in `prefix` above -#libdir = "lib" - -# Where to install man pages in `prefix` above -#mandir = "share/man" - -# ============================================================================= -# Options for compiling Rust code itself -# ============================================================================= -[rust] - -# Whether or not to optimize the compiler and standard library -# Note: the slowness of the non optimized compiler compiling itself usually -# outweighs the time gains in not doing optimizations, therefore a -# full bootstrap takes much more time with optimize set to false. -#optimize = true - -# Number of codegen units to use for each compiler invocation. A value of 0 -# means "the number of cores on this machine", and 1+ is passed through to the -# compiler. -#codegen-units = 1 - -# Whether or not debug assertions are enabled for the compiler and standard -# library -#debug-assertions = false - -# Whether or not debuginfo is emitted -#debuginfo = false - -# Whether or not line number debug information is emitted -#debuginfo-lines = false - -# Whether or not to only build debuginfo for the standard library if enabled. -# If enabled, this will not compile the compiler with debuginfo, just the -# standard library. 
-#debuginfo-only-std = false - -# Whether or not jemalloc is built and enabled -#use-jemalloc = true - -# Whether or not jemalloc is built with its debug option set -#debug-jemalloc = false - -# Whether or not `panic!`s generate backtraces (RUST_BACKTRACE) -#backtrace = true - -# The default linker that will be used by the generated compiler. Note that this -# is not the linker used to link said compiler. -#default-linker = "cc" - -# The default ar utility that will be used by the generated compiler if LLVM -# cannot be used. Note that this is not used to assemble said compiler. -#default-ar = "ar" - -# The "channel" for the Rust build to produce. The stable/beta channels only -# allow using stable features, whereas the nightly and dev channels allow using -# nightly features -#channel = "dev" - -# By default the `rustc` executable is built with `-Wl,-rpath` flags on Unix -# platforms to ensure that the compiler is usable by default from the build -# directory (as it links to a number of dynamic libraries). This may not be -# desired in distributions, for example. -#rpath = true - -# Flag indicating whether tests are compiled with optimizations (the -O flag) or -# with debuginfo (the -g flag) -#optimize-tests = true -#debuginfo-tests = true - -# Flag indicating whether codegen tests will be run or not. If you get an error -# saying that the FileCheck executable is missing, you may want to disable this. -#codegen-tests = true - -# ============================================================================= -# Options for specific targets -# -# Each of the following options is scoped to the specific target triple in -# question and is used for determining how to compile each target. -# ============================================================================= -[target.x86_64-unknown-linux-gnu] - -# C compiler to be used to compiler C code and link Rust code. Note that the -# default value is platform specific, and if not specified it may also depend on -# what platform is crossing to what platform. -#cc = "cc" - -# C++ compiler to be used to compiler C++ code (e.g. LLVM and our LLVM shims). -# This is only used for host targets. -#cxx = "c++" - -# Path to the `llvm-config` binary of the installation of a custom LLVM to link -# against. Note that if this is specifed we don't compile LLVM at all for this -# target. -#llvm-config = "../path/to/llvm/root/bin/llvm-config" - -# Path to the custom jemalloc static library to link into the standard library -# by default. This is only used if jemalloc is still enabled above -#jemalloc = "/path/to/jemalloc/libjemalloc_pic.a" - -# If this target is for Android, this option will be required to specify where -# the NDK for the target lives. This is used to find the C compiler to link and -# build native code. -#android-ndk = "/path/to/ndk" - -# The root location of the MUSL installation directory. The library directory -# will also need to contain libunwind.a for an unwinding implementation. Note -# that this option only makes sense for MUSL targets that produce statically -# linked binaries -#musl-root = "..." - -# ============================================================================= -# Distribution options -# -# These options are related to distribution, mostly for the Rust project itself. -# You probably won't need to concern yourself with any of these options -# ============================================================================= -[dist] - -# This is the folder of artifacts that the build system will sign. 
All files in -# this directory will be signed with the default gpg key using the system `gpg` -# binary. The `asc` and `sha256` files will all be output into the standard dist -# output folder (currently `build/dist`) -# -# This folder should be populated ahead of time before the build system is -# invoked. -#sign-folder = "path/to/folder/to/sign" - -# This is a file which contains the password of the default gpg key. This will -# be passed to `gpg` down the road when signing all files in `sign-folder` -# above. This should be stored in plaintext. -#gpg-password-file = "path/to/gpg/password" - -# The remote address that all artifacts will eventually be uploaded to. The -# build system generates manifests which will point to these urls, and for the -# manifests to be correct they'll have to have the right URLs encoded. -# -# Note that this address should not contain a trailing slash as file names will -# be appended to it. -#upload-addr = "https://example.com/folder" - -# Whether to build a plain source tarball to upload -# We disable that on Windows not to override the one already uploaded on S3 -# as the one built on Windows will contain backslashes in paths causing problems -# on linux -#src-tarball = true diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index 807e878eda..65a59d78d7 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -28,7 +28,11 @@ use build_helper::output; use {Build, Compiler, Mode}; use channel; -use util::{cp_r, libdir, is_dylib, cp_filtered, copy, exe}; +use util::{cp_r, libdir, is_dylib, cp_filtered, copy}; +use builder::{Builder, RunConfig, ShouldRun, Step}; +use compile; +use tool::{self, Tool}; +use cache::{INTERNER, Interned}; pub fn pkgname(build: &Build, component: &str) -> String { if component == "cargo" { @@ -49,50 +53,79 @@ pub fn tmpdir(build: &Build) -> PathBuf { build.out.join("tmp/dist") } -fn rust_installer(build: &Build) -> Command { - build.tool_cmd(&Compiler::new(0, &build.build), "rust-installer") +fn rust_installer(builder: &Builder) -> Command { + builder.tool_cmd(Tool::RustInstaller) } -/// Builds the `rust-docs` installer component. -/// -/// Slurps up documentation from the `stage`'s `host`. 
-pub fn docs(build: &Build, stage: u32, host: &str) { - println!("Dist docs stage{} ({})", stage, host); - if !build.config.docs { - println!("\tskipping - docs disabled"); - return +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Docs { + pub stage: u32, + pub host: Interned, +} + +impl Step for Docs { + type Output = PathBuf; + const DEFAULT: bool = true; + const ONLY_BUILD_TARGETS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/doc") } - let name = pkgname(build, "rust-docs"); - let image = tmpdir(build).join(format!("{}-{}-image", name, host)); - let _ = fs::remove_dir_all(&image); - - let dst = image.join("share/doc/rust/html"); - t!(fs::create_dir_all(&dst)); - let src = build.out.join(host).join("doc"); - cp_r(&src, &dst); - - let mut cmd = rust_installer(build); - cmd.arg("generate") - .arg("--product-name=Rust-Documentation") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Rust-documentation-is-installed.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg(format!("--package-name={}-{}", name, host)) - .arg("--component-name=rust-docs") - .arg("--legacy-manifest-dirs=rustlib,cargo") - .arg("--bulk-dirs=share/doc/rust/html"); - build.run(&mut cmd); - t!(fs::remove_dir_all(&image)); - - // As part of this step, *also* copy the docs directory to a directory which - // buildbot typically uploads. - if host == build.build { - let dst = distdir(build).join("doc").join(build.rust_package_vers()); + fn make_run(run: RunConfig) { + run.builder.ensure(Docs { + stage: run.builder.top_stage, + host: run.target, + }); + } + + /// Builds the `rust-docs` installer component. + fn run(self, builder: &Builder) -> PathBuf { + let build = builder.build; + let host = self.host; + + let name = pkgname(build, "rust-docs"); + + println!("Dist docs ({})", host); + if !build.config.docs { + println!("\tskipping - docs disabled"); + return distdir(build).join(format!("{}-{}.tar.gz", name, host)); + } + + builder.default_doc(None); + + let image = tmpdir(build).join(format!("{}-{}-image", name, host)); + let _ = fs::remove_dir_all(&image); + + let dst = image.join("share/doc/rust/html"); t!(fs::create_dir_all(&dst)); + let src = build.out.join(host).join("doc"); cp_r(&src, &dst); + + let mut cmd = rust_installer(builder); + cmd.arg("generate") + .arg("--product-name=Rust-Documentation") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Rust-documentation-is-installed.") + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) + .arg(format!("--package-name={}-{}", name, host)) + .arg("--component-name=rust-docs") + .arg("--legacy-manifest-dirs=rustlib,cargo") + .arg("--bulk-dirs=share/doc/rust/html"); + build.run(&mut cmd); + t!(fs::remove_dir_all(&image)); + + // As part of this step, *also* copy the docs directory to a directory which + // buildbot typically uploads. 
+ if host == build.build { + let dst = distdir(build).join("doc").join(build.rust_package_vers()); + t!(fs::create_dir_all(&dst)); + cp_r(&src, &dst); + } + + distdir(build).join(format!("{}-{}.tar.gz", name, host)) } } @@ -115,7 +148,9 @@ fn find_files(files: &[&str], path: &[PathBuf]) -> Vec { found } -fn make_win_dist(rust_root: &Path, plat_root: &Path, target_triple: &str, build: &Build) { +fn make_win_dist( + rust_root: &Path, plat_root: &Path, target_triple: Interned, build: &Build +) { //Ask gcc where it keeps its stuff let mut cmd = Command::new(build.cc(target_triple)); cmd.arg("-print-search-dirs"); @@ -222,262 +257,399 @@ fn make_win_dist(rust_root: &Path, plat_root: &Path, target_triple: &str, build: } } -/// Build the `rust-mingw` installer component. -/// -/// This contains all the bits and pieces to run the MinGW Windows targets -/// without any extra installed software (e.g. we bundle gcc, libraries, etc). -pub fn mingw(build: &Build, host: &str) { - println!("Dist mingw ({})", host); - let name = pkgname(build, "rust-mingw"); - let image = tmpdir(build).join(format!("{}-{}-image", name, host)); - let _ = fs::remove_dir_all(&image); - t!(fs::create_dir_all(&image)); - - // The first argument is a "temporary directory" which is just - // thrown away (this contains the runtime DLLs included in the rustc package - // above) and the second argument is where to place all the MinGW components - // (which is what we want). - make_win_dist(&tmpdir(build), &image, host, &build); - - let mut cmd = rust_installer(build); - cmd.arg("generate") - .arg("--product-name=Rust-MinGW") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Rust-MinGW-is-installed.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg(format!("--package-name={}-{}", name, host)) - .arg("--component-name=rust-mingw") - .arg("--legacy-manifest-dirs=rustlib,cargo"); - build.run(&mut cmd); - t!(fs::remove_dir_all(&image)); +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Mingw { + host: Interned, } -/// Creates the `rustc` installer component. -pub fn rustc(build: &Build, stage: u32, host: &str) { - println!("Dist rustc stage{} ({})", stage, host); - let name = pkgname(build, "rustc"); - let image = tmpdir(build).join(format!("{}-{}-image", name, host)); - let _ = fs::remove_dir_all(&image); - let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host)); - let _ = fs::remove_dir_all(&overlay); - - // Prepare the rustc "image", what will actually end up getting installed - prepare_image(build, stage, host, &image); - - // Prepare the overlay which is part of the tarball but won't actually be - // installed - let cp = |file: &str| { - install(&build.src.join(file), &overlay, 0o644); - }; - cp("COPYRIGHT"); - cp("LICENSE-APACHE"); - cp("LICENSE-MIT"); - cp("README.md"); - // tiny morsel of metadata is used by rust-packaging - let version = build.rust_version(); - t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes())); - - // On MinGW we've got a few runtime DLL dependencies that we need to - // include. The first argument to this script is where to put these DLLs - // (the image we're creating), and the second argument is a junk directory - // to ignore all other MinGW stuff the script creates. - // - // On 32-bit MinGW we're always including a DLL which needs some extra - // licenses to distribute. 
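For orientation: the Docs hunk above shows the shape that every dist routine in this file now takes, a plain struct keyed by its inputs plus an implementation of the builder's Step trait. The following is only a rough sketch reconstructed from the hunks in this diff; the authoritative definition (with its full trait bounds, the ShouldRun/RunConfig types, and further flags such as ONLY_BUILD) lives in the new src/bootstrap/builder.rs.

// Sketch only: approximate shape of the Step interface these dist steps implement.
// In the real trait the boolean consts default to false, which is why most
// steps override only a few of them.
pub trait Step: 'static + Clone {
    /// What the step yields; dist steps typically return the tarball path.
    type Output;
    /// Whether the step runs when no explicit paths are passed to x.py.
    const DEFAULT: bool;
    /// Restrict the step to host triples / to targets of the build triple.
    const ONLY_HOSTS: bool;
    const ONLY_BUILD_TARGETS: bool;

    /// Declare which source paths select this step, e.g. run.path("src/doc").
    fn should_run(run: ShouldRun) -> ShouldRun;
    /// Translate a command-line selection into a concrete ensure() call.
    fn make_run(run: RunConfig);
    /// Do the work; the builder caches the result per distinct Self value.
    fn run(self, builder: &Builder) -> Self::Output;
}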
On 64-bit MinGW we don't actually distribute - // anything requiring us to distribute a license, but it's likely the - // install will *also* include the rust-mingw package, which also needs - // licenses, so to be safe we just include it here in all MinGW packages. - if host.contains("pc-windows-gnu") { - make_win_dist(&image, &tmpdir(build), host, build); - - let dst = image.join("share/doc"); - t!(fs::create_dir_all(&dst)); - cp_r(&build.src.join("src/etc/third-party"), &dst); +impl Step for Mingw { + type Output = Option; + const DEFAULT: bool = true; + const ONLY_BUILD_TARGETS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.never() } - // Finally, wrap everything up in a nice tarball! - let mut cmd = rust_installer(build); - cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Rust-is-ready-to-roll.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg("--non-installed-overlay").arg(&overlay) - .arg(format!("--package-name={}-{}", name, host)) - .arg("--component-name=rustc") - .arg("--legacy-manifest-dirs=rustlib,cargo"); - build.run(&mut cmd); - t!(fs::remove_dir_all(&image)); - t!(fs::remove_dir_all(&overlay)); - - fn prepare_image(build: &Build, stage: u32, host: &str, image: &Path) { - let src = build.sysroot(&Compiler::new(stage, host)); - let libdir = libdir(host); - - // Copy rustc/rustdoc binaries - t!(fs::create_dir_all(image.join("bin"))); - cp_r(&src.join("bin"), &image.join("bin")); - - // Copy runtime DLLs needed by the compiler - if libdir != "bin" { - for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) { - let name = entry.file_name(); - if let Some(s) = name.to_str() { - if is_dylib(s) { - install(&entry.path(), &image.join(libdir), 0o644); - } - } - } + fn make_run(run: RunConfig) { + run.builder.ensure(Mingw { host: run.target }); + } + + /// Build the `rust-mingw` installer component. + /// + /// This contains all the bits and pieces to run the MinGW Windows targets + /// without any extra installed software (e.g. we bundle gcc, libraries, etc). + fn run(self, builder: &Builder) -> Option { + let build = builder.build; + let host = self.host; + + if !host.contains("pc-windows-gnu") { + return None; } - // Man pages - t!(fs::create_dir_all(image.join("share/man/man1"))); - cp_r(&build.src.join("man"), &image.join("share/man/man1")); + println!("Dist mingw ({})", host); + let name = pkgname(build, "rust-mingw"); + let image = tmpdir(build).join(format!("{}-{}-image", name, host)); + let _ = fs::remove_dir_all(&image); + t!(fs::create_dir_all(&image)); + + // The first argument is a "temporary directory" which is just + // thrown away (this contains the runtime DLLs included in the rustc package + // above) and the second argument is where to place all the MinGW components + // (which is what we want). 
+ make_win_dist(&tmpdir(build), &image, host, &build); + + let mut cmd = rust_installer(builder); + cmd.arg("generate") + .arg("--product-name=Rust-MinGW") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Rust-MinGW-is-installed.") + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) + .arg(format!("--package-name={}-{}", name, host)) + .arg("--component-name=rust-mingw") + .arg("--legacy-manifest-dirs=rustlib,cargo"); + build.run(&mut cmd); + t!(fs::remove_dir_all(&image)); + Some(distdir(build).join(format!("{}-{}.tar.gz", name, host))) + } +} - // Debugger scripts - debugger_scripts(build, &image, host); +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Rustc { + pub compiler: Compiler, +} + +impl Step for Rustc { + type Output = PathBuf; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + const ONLY_BUILD_TARGETS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/librustc") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Rustc { + compiler: run.builder.compiler(run.builder.top_stage, run.target), + }); + } + + /// Creates the `rustc` installer component. + fn run(self, builder: &Builder) -> PathBuf { + let build = builder.build; + let compiler = self.compiler; + let host = self.compiler.host; - // Misc license info + println!("Dist rustc stage{} ({})", compiler.stage, compiler.host); + let name = pkgname(build, "rustc"); + let image = tmpdir(build).join(format!("{}-{}-image", name, host)); + let _ = fs::remove_dir_all(&image); + let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host)); + let _ = fs::remove_dir_all(&overlay); + + // Prepare the rustc "image", what will actually end up getting installed + prepare_image(builder, compiler, &image); + + // Prepare the overlay which is part of the tarball but won't actually be + // installed let cp = |file: &str| { - install(&build.src.join(file), &image.join("share/doc/rust"), 0o644); + install(&build.src.join(file), &overlay, 0o644); }; cp("COPYRIGHT"); cp("LICENSE-APACHE"); cp("LICENSE-MIT"); cp("README.md"); + // tiny morsel of metadata is used by rust-packaging + let version = build.rust_version(); + t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes())); + + // On MinGW we've got a few runtime DLL dependencies that we need to + // include. The first argument to this script is where to put these DLLs + // (the image we're creating), and the second argument is a junk directory + // to ignore all other MinGW stuff the script creates. + // + // On 32-bit MinGW we're always including a DLL which needs some extra + // licenses to distribute. On 64-bit MinGW we don't actually distribute + // anything requiring us to distribute a license, but it's likely the + // install will *also* include the rust-mingw package, which also needs + // licenses, so to be safe we just include it here in all MinGW packages. + if host.contains("pc-windows-gnu") { + make_win_dist(&image, &tmpdir(build), host, build); + + let dst = image.join("share/doc"); + t!(fs::create_dir_all(&dst)); + cp_r(&build.src.join("src/etc/third-party"), &dst); + } + + // Finally, wrap everything up in a nice tarball! 
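One behavioural detail in the Mingw hunk above: its Output is an Option<PathBuf> rather than a plain path, and run() returns None for hosts that are not pc-windows-gnu. A hypothetical caller inside this module would look roughly as below (collect_mingw_tarball is an illustrative name, not a function in this file); the Extended step further down only unwraps the value for windows-gnu targets.

// Sketch: the only dist step whose output is optional.
fn collect_mingw_tarball(builder: &Builder, host: Interned<String>) -> Option<PathBuf> {
    // None unless `host` is a pc-windows-gnu triple.
    builder.ensure(Mingw { host })
}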
+ let mut cmd = rust_installer(builder); + cmd.arg("generate") + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Rust-is-ready-to-roll.") + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) + .arg("--non-installed-overlay").arg(&overlay) + .arg(format!("--package-name={}-{}", name, host)) + .arg("--component-name=rustc") + .arg("--legacy-manifest-dirs=rustlib,cargo"); + build.run(&mut cmd); + t!(fs::remove_dir_all(&image)); + t!(fs::remove_dir_all(&overlay)); + + return distdir(build).join(format!("{}-{}.tar.gz", name, host)); + + fn prepare_image(builder: &Builder, compiler: Compiler, image: &Path) { + let host = compiler.host; + let build = builder.build; + let src = builder.sysroot(compiler); + let libdir = libdir(&host); + + // Copy rustc/rustdoc binaries + t!(fs::create_dir_all(image.join("bin"))); + cp_r(&src.join("bin"), &image.join("bin")); + + install(&builder.rustdoc(compiler.host), &image.join("bin"), 0o755); + + // Copy runtime DLLs needed by the compiler + if libdir != "bin" { + for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) { + let name = entry.file_name(); + if let Some(s) = name.to_str() { + if is_dylib(s) { + install(&entry.path(), &image.join(libdir), 0o644); + } + } + } + } + + // Man pages + t!(fs::create_dir_all(image.join("share/man/man1"))); + cp_r(&build.src.join("man"), &image.join("share/man/man1")); + + // Debugger scripts + builder.ensure(DebuggerScripts { + sysroot: INTERNER.intern_path(image.to_owned()), + host, + }); + + // Misc license info + let cp = |file: &str| { + install(&build.src.join(file), &image.join("share/doc/rust"), 0o644); + }; + cp("COPYRIGHT"); + cp("LICENSE-APACHE"); + cp("LICENSE-MIT"); + cp("README.md"); + } } } -/// Copies debugger scripts for `host` into the `sysroot` specified. -pub fn debugger_scripts(build: &Build, - sysroot: &Path, - host: &str) { - let dst = sysroot.join("lib/rustlib/etc"); - t!(fs::create_dir_all(&dst)); - let cp_debugger_script = |file: &str| { - install(&build.src.join("src/etc/").join(file), &dst, 0o644); - }; - if host.contains("windows-msvc") { - // windbg debugger scripts - install(&build.src.join("src/etc/rust-windbg.cmd"), &sysroot.join("bin"), - 0o755); - - cp_debugger_script("natvis/liballoc.natvis"); - cp_debugger_script("natvis/libcore.natvis"); - } else { - cp_debugger_script("debugger_pretty_printers_common.py"); +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct DebuggerScripts { + pub sysroot: Interned, + pub host: Interned, +} - // gdb debugger scripts - install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"), - 0o755); +impl Step for DebuggerScripts { + type Output = (); + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/lldb_batchmode.py") + } - cp_debugger_script("gdb_load_rust_pretty_printers.py"); - cp_debugger_script("gdb_rust_pretty_printing.py"); + fn make_run(run: RunConfig) { + run.builder.ensure(DebuggerScripts { + sysroot: run.builder.sysroot(run.builder.compiler(run.builder.top_stage, run.host)), + host: run.target, + }); + } - // lldb debugger scripts - install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"), + /// Copies debugger scripts for `target` into the `sysroot` specified. 
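The prepare_image hunk above also shows how steps now express their dependencies: instead of calling each other directly, they go through builder.ensure(...), which runs a step at most once per distinct key value and hands back its Output (interned strings and paths keep those keys cheap to copy and hash). A hypothetical helper using the same pattern, with collect_rustc_tarball as an illustrative name:

// Sketch: chaining steps through ensure(), as Extended does further down.
// Asking for the same Rustc { compiler } twice packages the compiler only once.
fn collect_rustc_tarball(builder: &Builder, stage: u32, host: Interned<String>) -> PathBuf {
    let compiler = builder.compiler(stage, host);
    builder.ensure(Rustc { compiler })
}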
+ fn run(self, builder: &Builder) { + let build = builder.build; + let host = self.host; + let sysroot = self.sysroot; + let dst = sysroot.join("lib/rustlib/etc"); + t!(fs::create_dir_all(&dst)); + let cp_debugger_script = |file: &str| { + install(&build.src.join("src/etc/").join(file), &dst, 0o644); + }; + if host.contains("windows-msvc") { + // windbg debugger scripts + install(&build.src.join("src/etc/rust-windbg.cmd"), &sysroot.join("bin"), 0o755); - cp_debugger_script("lldb_rust_formatters.py"); + cp_debugger_script("natvis/liballoc.natvis"); + cp_debugger_script("natvis/libcore.natvis"); + } else { + cp_debugger_script("debugger_pretty_printers_common.py"); + + // gdb debugger scripts + install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"), + 0o755); + + cp_debugger_script("gdb_load_rust_pretty_printers.py"); + cp_debugger_script("gdb_rust_pretty_printing.py"); + + // lldb debugger scripts + install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"), + 0o755); + + cp_debugger_script("lldb_rust_formatters.py"); + } } } -/// Creates the `rust-std` installer component as compiled by `compiler` for the -/// target `target`. -pub fn std(build: &Build, compiler: &Compiler, target: &str) { - println!("Dist std stage{} ({} -> {})", compiler.stage, compiler.host, - target); - - // The only true set of target libraries came from the build triple, so - // let's reduce redundant work by only producing archives from that host. - if compiler.host != build.build { - println!("\tskipping, not a build host"); - return +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Std { + pub compiler: Compiler, + pub target: Interned, +} + +impl Step for Std { + type Output = PathBuf; + const DEFAULT: bool = true; + const ONLY_BUILD_TARGETS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/libstd") } - let name = pkgname(build, "rust-std"); - let image = tmpdir(build).join(format!("{}-{}-image", name, target)); - let _ = fs::remove_dir_all(&image); - - let dst = image.join("lib/rustlib").join(target); - t!(fs::create_dir_all(&dst)); - let mut src = build.sysroot_libdir(compiler, target); - src.pop(); // Remove the trailing /lib folder from the sysroot_libdir - cp_r(&src, &dst); - - let mut cmd = rust_installer(build); - cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=std-is-standing-at-the-ready.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg(format!("--package-name={}-{}", name, target)) - .arg(format!("--component-name=rust-std-{}", target)) - .arg("--legacy-manifest-dirs=rustlib,cargo"); - build.run(&mut cmd); - t!(fs::remove_dir_all(&image)); -} + fn make_run(run: RunConfig) { + run.builder.ensure(Std { + compiler: run.builder.compiler(run.builder.top_stage, run.host), + target: run.target, + }); + } + + fn run(self, builder: &Builder) -> PathBuf { + let build = builder.build; + let compiler = self.compiler; + let target = self.target; + + let name = pkgname(build, "rust-std"); + println!("Dist std stage{} ({} -> {})", compiler.stage, &compiler.host, target); -/// The path to the complete rustc-src tarball -pub fn rust_src_location(build: &Build) -> PathBuf { - let plain_name = format!("rustc-{}-src", build.rust_package_vers()); - distdir(build).join(&format!("{}.tar.gz", plain_name)) + // The only true set of target libraries came from the build triple, so + // let's reduce redundant work by only 
producing archives from that host. + if compiler.host != build.build { + println!("\tskipping, not a build host"); + return distdir(build).join(format!("{}-{}.tar.gz", name, target)); + } + + // We want to package up as many target libraries as possible + // for the `rust-std` package, so if this is a host target we + // depend on librustc and otherwise we just depend on libtest. + if build.hosts.iter().any(|t| t == target) { + builder.ensure(compile::Rustc { compiler, target }); + } else { + builder.ensure(compile::Test { compiler, target }); + } + + let image = tmpdir(build).join(format!("{}-{}-image", name, target)); + let _ = fs::remove_dir_all(&image); + + let dst = image.join("lib/rustlib").join(target); + t!(fs::create_dir_all(&dst)); + let mut src = builder.sysroot_libdir(compiler, target).to_path_buf(); + src.pop(); // Remove the trailing /lib folder from the sysroot_libdir + cp_r(&src, &dst); + + let mut cmd = rust_installer(builder); + cmd.arg("generate") + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=std-is-standing-at-the-ready.") + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) + .arg(format!("--package-name={}-{}", name, target)) + .arg(format!("--component-name=rust-std-{}", target)) + .arg("--legacy-manifest-dirs=rustlib,cargo"); + build.run(&mut cmd); + t!(fs::remove_dir_all(&image)); + distdir(build).join(format!("{}-{}.tar.gz", name, target)) + } } -/// The path to the rust-src component installer -pub fn rust_src_installer(build: &Build) -> PathBuf { - let name = pkgname(build, "rust-src"); - distdir(build).join(&format!("{}.tar.gz", name)) +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Analysis { + pub compiler: Compiler, + pub target: Interned, } -/// Creates a tarball of save-analysis metadata, if available. -pub fn analysis(build: &Build, compiler: &Compiler, target: &str) { - assert!(build.config.extended); - println!("Dist analysis"); +impl Step for Analysis { + type Output = PathBuf; + const DEFAULT: bool = true; + const ONLY_BUILD_TARGETS: bool = true; - if compiler.host != build.build { - println!("\tskipping, not a build host"); - return; + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.path("analysis").default_condition(builder.build.config.extended) } - // Package save-analysis from stage1 if not doing a full bootstrap, as the - // stage2 artifacts is simply copied from stage1 in that case. 
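Note the convention visible in the Docs, Std, and Analysis hunks: even when a step decides to skip its work (docs disabled, or the compiler is not the build host), run() still returns the path the tarball would have had, so downstream consumers such as Extended can collect installer paths unconditionally. That path is always derived the same way; a sketch for rust-std follows (std_tarball_path is an illustrative name, not a function in this file).

// Sketch: the name a rust-std tarball gets, returned even on the skip paths.
fn std_tarball_path(build: &Build, target: Interned<String>) -> PathBuf {
    distdir(build).join(format!("{}-{}.tar.gz", pkgname(build, "rust-std"), target))
}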
- let compiler = if build.force_use_stage1(compiler, target) { - Compiler::new(1, compiler.host) - } else { - compiler.clone() - }; - - let name = pkgname(build, "rust-analysis"); - let image = tmpdir(build).join(format!("{}-{}-image", name, target)); - - let src = build.stage_out(&compiler, Mode::Libstd).join(target).join("release").join("deps"); - - let image_src = src.join("save-analysis"); - let dst = image.join("lib/rustlib").join(target).join("analysis"); - t!(fs::create_dir_all(&dst)); - println!("image_src: {:?}, dst: {:?}", image_src, dst); - cp_r(&image_src, &dst); - - let mut cmd = rust_installer(build); - cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=save-analysis-saved.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg(format!("--package-name={}-{}", name, target)) - .arg(format!("--component-name=rust-analysis-{}", target)) - .arg("--legacy-manifest-dirs=rustlib,cargo"); - build.run(&mut cmd); - t!(fs::remove_dir_all(&image)); + fn make_run(run: RunConfig) { + run.builder.ensure(Analysis { + compiler: run.builder.compiler(run.builder.top_stage, run.host), + target: run.target, + }); + } + + /// Creates a tarball of save-analysis metadata, if available. + fn run(self, builder: &Builder) -> PathBuf { + let build = builder.build; + let compiler = self.compiler; + let target = self.target; + assert!(build.config.extended); + println!("Dist analysis"); + let name = pkgname(build, "rust-analysis"); + + if &compiler.host != build.build { + println!("\tskipping, not a build host"); + return distdir(build).join(format!("{}-{}.tar.gz", name, target)); + } + + builder.ensure(Std { compiler, target }); + + // Package save-analysis from stage1 if not doing a full bootstrap, as the + // stage2 artifacts is simply copied from stage1 in that case. 
+ let compiler = if build.force_use_stage1(compiler, target) { + builder.compiler(1, compiler.host) + } else { + compiler.clone() + }; + + let image = tmpdir(build).join(format!("{}-{}-image", name, target)); + + let src = build.stage_out(compiler, Mode::Libstd) + .join(target).join("release").join("deps"); + + let image_src = src.join("save-analysis"); + let dst = image.join("lib/rustlib").join(target).join("analysis"); + t!(fs::create_dir_all(&dst)); + println!("image_src: {:?}, dst: {:?}", image_src, dst); + cp_r(&image_src, &dst); + + let mut cmd = rust_installer(builder); + cmd.arg("generate") + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=save-analysis-saved.") + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) + .arg(format!("--package-name={}-{}", name, target)) + .arg(format!("--component-name=rust-analysis-{}", target)) + .arg("--legacy-manifest-dirs=rustlib,cargo"); + build.run(&mut cmd); + t!(fs::remove_dir_all(&image)); + distdir(build).join(format!("{}-{}.tar.gz", name, target)) + } } fn copy_src_dirs(build: &Build, src_dirs: &[&str], exclude_dirs: &[&str], dst_dir: &Path) { @@ -520,149 +692,196 @@ fn copy_src_dirs(build: &Build, src_dirs: &[&str], exclude_dirs: &[&str], dst_di } } -/// Creates the `rust-src` installer component -pub fn rust_src(build: &Build) { - println!("Dist src"); - - let name = pkgname(build, "rust-src"); - let image = tmpdir(build).join(format!("{}-image", name)); - let _ = fs::remove_dir_all(&image); - - let dst = image.join("lib/rustlib/src"); - let dst_src = dst.join("rust"); - t!(fs::create_dir_all(&dst_src)); - - // This is the reduced set of paths which will become the rust-src component - // (essentially libstd and all of its path dependencies) - let std_src_dirs = [ - "src/build_helper", - "src/liballoc", - "src/liballoc_jemalloc", - "src/liballoc_system", - "src/libbacktrace", - "src/libcollections", - "src/libcompiler_builtins", - "src/libcore", - "src/liblibc", - "src/libpanic_abort", - "src/libpanic_unwind", - "src/librand", - "src/librustc_asan", - "src/librustc_lsan", - "src/librustc_msan", - "src/librustc_tsan", - "src/libstd", - "src/libstd_unicode", - "src/libunwind", - "src/rustc/compiler_builtins_shim", - "src/rustc/libc_shim", - "src/libtest", - "src/libterm", - "src/jemalloc", - "src/libprofiler_builtins", - ]; - let std_src_dirs_exclude = [ - "src/compiler-rt/test", - "src/jemalloc/test/unit", - ]; +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Src; + +impl Step for Src { + /// The output path of the src installer tarball + type Output = PathBuf; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + const ONLY_BUILD_TARGETS: bool = true; + const ONLY_BUILD: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Src); + } - copy_src_dirs(build, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src); - - // Create source tarball in rust-installer format - let mut cmd = rust_installer(build); - cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Awesome-Source.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg(format!("--package-name={}", name)) - .arg("--component-name=rust-src") - .arg("--legacy-manifest-dirs=rustlib,cargo"); - build.run(&mut cmd); - - 
t!(fs::remove_dir_all(&image)); + /// Creates the `rust-src` installer component + fn run(self, builder: &Builder) -> PathBuf { + let build = builder.build; + println!("Dist src"); + + let name = pkgname(build, "rust-src"); + let image = tmpdir(build).join(format!("{}-image", name)); + let _ = fs::remove_dir_all(&image); + + let dst = image.join("lib/rustlib/src"); + let dst_src = dst.join("rust"); + t!(fs::create_dir_all(&dst_src)); + + // This is the reduced set of paths which will become the rust-src component + // (essentially libstd and all of its path dependencies) + let std_src_dirs = [ + "src/build_helper", + "src/liballoc", + "src/liballoc_jemalloc", + "src/liballoc_system", + "src/libbacktrace", + "src/libcollections", + "src/libcompiler_builtins", + "src/libcore", + "src/liblibc", + "src/libpanic_abort", + "src/libpanic_unwind", + "src/librand", + "src/librustc_asan", + "src/librustc_lsan", + "src/librustc_msan", + "src/librustc_tsan", + "src/libstd", + "src/libstd_unicode", + "src/libunwind", + "src/rustc/compiler_builtins_shim", + "src/rustc/libc_shim", + "src/libtest", + "src/libterm", + "src/jemalloc", + "src/libprofiler_builtins", + ]; + let std_src_dirs_exclude = [ + "src/compiler-rt/test", + "src/jemalloc/test/unit", + ]; + + copy_src_dirs(build, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src); + + // Create source tarball in rust-installer format + let mut cmd = rust_installer(builder); + cmd.arg("generate") + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Awesome-Source.") + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) + .arg(format!("--package-name={}", name)) + .arg("--component-name=rust-src") + .arg("--legacy-manifest-dirs=rustlib,cargo"); + build.run(&mut cmd); + + t!(fs::remove_dir_all(&image)); + distdir(build).join(&format!("{}.tar.gz", name)) + } } const CARGO_VENDOR_VERSION: &str = "0.1.4"; -/// Creates the plain source tarball -pub fn plain_source_tarball(build: &Build) { - println!("Create plain source tarball"); - - // Make sure that the root folder of tarball has the correct name - let plain_name = format!("{}-src", pkgname(build, "rustc")); - let plain_dst_src = tmpdir(build).join(&plain_name); - let _ = fs::remove_dir_all(&plain_dst_src); - t!(fs::create_dir_all(&plain_dst_src)); - - // This is the set of root paths which will become part of the source package - let src_files = [ - "COPYRIGHT", - "LICENSE-APACHE", - "LICENSE-MIT", - "CONTRIBUTING.md", - "README.md", - "RELEASES.md", - "configure", - "x.py", - ]; - let src_dirs = [ - "man", - "src", - ]; +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct PlainSourceTarball; - copy_src_dirs(build, &src_dirs[..], &[], &plain_dst_src); +impl Step for PlainSourceTarball { + /// Produces the location of the tarball generated + type Output = PathBuf; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + const ONLY_BUILD_TARGETS: bool = true; + const ONLY_BUILD: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.path("src").default_condition(builder.config.rust_dist_src) + } - // Copy the files normally - for item in &src_files { - copy(&build.src.join(item), &plain_dst_src.join(item)); + fn make_run(run: RunConfig) { + run.builder.ensure(PlainSourceTarball); } - // Create the version file - write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes()); + /// Creates the plain source tarball + fn run(self, 
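The Src step above packages the reduced source tree (libstd plus its path dependencies) under lib/rustlib/src/rust inside the image, which is also where the component ends up relative to an installed sysroot. A small sketch, assuming one wants to locate that directory programmatically (rust_src_dir is an illustrative name):

// Sketch: the on-disk layout the Src step builds, relative to a sysroot.
use std::path::{Path, PathBuf};

fn rust_src_dir(sysroot: &Path) -> PathBuf {
    sysroot.join("lib/rustlib/src").join("rust")
}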
builder: &Builder) -> PathBuf { + let build = builder.build; + println!("Create plain source tarball"); + + // Make sure that the root folder of tarball has the correct name + let plain_name = format!("{}-src", pkgname(build, "rustc")); + let plain_dst_src = tmpdir(build).join(&plain_name); + let _ = fs::remove_dir_all(&plain_dst_src); + t!(fs::create_dir_all(&plain_dst_src)); + + // This is the set of root paths which will become part of the source package + let src_files = [ + "COPYRIGHT", + "LICENSE-APACHE", + "LICENSE-MIT", + "CONTRIBUTING.md", + "README.md", + "RELEASES.md", + "configure", + "x.py", + ]; + let src_dirs = [ + "man", + "src", + ]; + + copy_src_dirs(build, &src_dirs[..], &[], &plain_dst_src); - // If we're building from git sources, we need to vendor a complete distribution. - if build.rust_info.is_git() { - // Get cargo-vendor installed, if it isn't already. - let mut has_cargo_vendor = false; - let mut cmd = Command::new(&build.initial_cargo); - for line in output(cmd.arg("install").arg("--list")).lines() { - has_cargo_vendor |= line.starts_with("cargo-vendor "); + // Copy the files normally + for item in &src_files { + copy(&build.src.join(item), &plain_dst_src.join(item)); } - if !has_cargo_vendor { + + // Create the version file + write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes()); + + // If we're building from git sources, we need to vendor a complete distribution. + if build.rust_info.is_git() { + // Get cargo-vendor installed, if it isn't already. + let mut has_cargo_vendor = false; + let mut cmd = Command::new(&build.initial_cargo); + for line in output(cmd.arg("install").arg("--list")).lines() { + has_cargo_vendor |= line.starts_with("cargo-vendor "); + } + if !has_cargo_vendor { + let mut cmd = Command::new(&build.initial_cargo); + cmd.arg("install") + .arg("--force") + .arg("--debug") + .arg("--vers").arg(CARGO_VENDOR_VERSION) + .arg("cargo-vendor") + .env("RUSTC", &build.initial_rustc); + build.run(&mut cmd); + } + + // Vendor all Cargo dependencies let mut cmd = Command::new(&build.initial_cargo); - cmd.arg("install") - .arg("--force") - .arg("--debug") - .arg("--vers").arg(CARGO_VENDOR_VERSION) - .arg("cargo-vendor") - .env("RUSTC", &build.initial_rustc); + cmd.arg("vendor") + .current_dir(&plain_dst_src.join("src")); build.run(&mut cmd); } - // Vendor all Cargo dependencies - let mut cmd = Command::new(&build.initial_cargo); - cmd.arg("vendor") - .current_dir(&plain_dst_src.join("src")); + // Create plain source tarball + let plain_name = format!("rustc-{}-src", build.rust_package_vers()); + let mut tarball = distdir(build).join(&format!("{}.tar.gz", plain_name)); + tarball.set_extension(""); // strip .gz + tarball.set_extension(""); // strip .tar + if let Some(dir) = tarball.parent() { + t!(fs::create_dir_all(dir)); + } + println!("running installer"); + let mut cmd = rust_installer(builder); + cmd.arg("tarball") + .arg("--input").arg(&plain_name) + .arg("--output").arg(&tarball) + .arg("--work-dir=.") + .current_dir(tmpdir(build)); build.run(&mut cmd); + distdir(build).join(&format!("{}.tar.gz", plain_name)) } - - // Create plain source tarball - let mut tarball = rust_src_location(build); - tarball.set_extension(""); // strip .gz - tarball.set_extension(""); // strip .tar - if let Some(dir) = tarball.parent() { - t!(fs::create_dir_all(dir)); - } - let mut cmd = rust_installer(build); - cmd.arg("tarball") - .arg("--input").arg(&plain_name) - .arg("--output").arg(&tarball) - .arg("--work-dir=.") - .current_dir(tmpdir(build)); - 
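A small but easy-to-miss detail in the PlainSourceTarball hunk above is the doubled set_extension("") call: PathBuf::set_extension only replaces the final extension, so it takes two calls to go from .tar.gz to a bare name. A self-contained sketch of what those two lines do (strip_tar_gz is an illustrative name):

// Sketch: why set_extension("") appears twice above.
use std::path::PathBuf;

fn strip_tar_gz(mut tarball: PathBuf) -> PathBuf {
    // "rustc-1.21.0-src.tar.gz" -> "rustc-1.21.0-src.tar"
    tarball.set_extension("");
    // "rustc-1.21.0-src.tar"    -> "rustc-1.21.0-src"
    tarball.set_extension("");
    tarball
}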
build.run(&mut cmd); } fn install(src: &Path, dstdir: &Path, perms: u32) { @@ -704,422 +923,561 @@ fn write_file(path: &Path, data: &[u8]) { t!(vf.write_all(data)); } -pub fn cargo(build: &Build, stage: u32, target: &str) { - println!("Dist cargo stage{} ({})", stage, target); - let compiler = Compiler::new(stage, &build.build); - - let src = build.src.join("src/tools/cargo"); - let etc = src.join("src/etc"); - let release_num = build.release_num("cargo"); - let name = pkgname(build, "cargo"); - let version = build.cargo_info.version(build, &release_num); - - let tmp = tmpdir(build); - let image = tmp.join("cargo-image"); - drop(fs::remove_dir_all(&image)); - t!(fs::create_dir_all(&image)); - - // Prepare the image directory - t!(fs::create_dir_all(image.join("share/zsh/site-functions"))); - t!(fs::create_dir_all(image.join("etc/bash_completion.d"))); - let cargo = build.cargo_out(&compiler, Mode::Tool, target) - .join(exe("cargo", target)); - install(&cargo, &image.join("bin"), 0o755); - for man in t!(etc.join("man").read_dir()) { - let man = t!(man); - install(&man.path(), &image.join("share/man/man1"), 0o644); +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Cargo { + pub stage: u32, + pub target: Interned, +} + +impl Step for Cargo { + type Output = PathBuf; + const ONLY_BUILD_TARGETS: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("cargo") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Cargo { + stage: run.builder.top_stage, + target: run.target, + }); + } + + fn run(self, builder: &Builder) -> PathBuf { + let build = builder.build; + let stage = self.stage; + let target = self.target; + + println!("Dist cargo stage{} ({})", stage, target); + let src = build.src.join("src/tools/cargo"); + let etc = src.join("src/etc"); + let release_num = build.release_num("cargo"); + let name = pkgname(build, "cargo"); + let version = builder.cargo_info.version(build, &release_num); + + let tmp = tmpdir(build); + let image = tmp.join("cargo-image"); + drop(fs::remove_dir_all(&image)); + t!(fs::create_dir_all(&image)); + + // Prepare the image directory + t!(fs::create_dir_all(image.join("share/zsh/site-functions"))); + t!(fs::create_dir_all(image.join("etc/bash_completion.d"))); + let cargo = builder.ensure(tool::Cargo { + compiler: builder.compiler(stage, build.build), + target + }); + install(&cargo, &image.join("bin"), 0o755); + for man in t!(etc.join("man").read_dir()) { + let man = t!(man); + install(&man.path(), &image.join("share/man/man1"), 0o644); + } + install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644); + copy(&etc.join("cargo.bashcomp.sh"), + &image.join("etc/bash_completion.d/cargo")); + let doc = image.join("share/doc/cargo"); + install(&src.join("README.md"), &doc, 0o644); + install(&src.join("LICENSE-MIT"), &doc, 0o644); + install(&src.join("LICENSE-APACHE"), &doc, 0o644); + install(&src.join("LICENSE-THIRD-PARTY"), &doc, 0o644); + + // Prepare the overlay + let overlay = tmp.join("cargo-overlay"); + drop(fs::remove_dir_all(&overlay)); + t!(fs::create_dir_all(&overlay)); + install(&src.join("README.md"), &overlay, 0o644); + install(&src.join("LICENSE-MIT"), &overlay, 0o644); + install(&src.join("LICENSE-APACHE"), &overlay, 0o644); + install(&src.join("LICENSE-THIRD-PARTY"), &overlay, 0o644); + t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes())); + + // Generate the installer tarball + let mut cmd = rust_installer(builder); + cmd.arg("generate") + 
.arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Rust-is-ready-to-roll.") + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) + .arg("--non-installed-overlay").arg(&overlay) + .arg(format!("--package-name={}-{}", name, target)) + .arg("--component-name=cargo") + .arg("--legacy-manifest-dirs=rustlib,cargo"); + build.run(&mut cmd); + distdir(build).join(format!("{}-{}.tar.gz", name, target)) } - install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644); - copy(&etc.join("cargo.bashcomp.sh"), - &image.join("etc/bash_completion.d/cargo")); - let doc = image.join("share/doc/cargo"); - install(&src.join("README.md"), &doc, 0o644); - install(&src.join("LICENSE-MIT"), &doc, 0o644); - install(&src.join("LICENSE-APACHE"), &doc, 0o644); - install(&src.join("LICENSE-THIRD-PARTY"), &doc, 0o644); - - // Prepare the overlay - let overlay = tmp.join("cargo-overlay"); - drop(fs::remove_dir_all(&overlay)); - t!(fs::create_dir_all(&overlay)); - install(&src.join("README.md"), &overlay, 0o644); - install(&src.join("LICENSE-MIT"), &overlay, 0o644); - install(&src.join("LICENSE-APACHE"), &overlay, 0o644); - install(&src.join("LICENSE-THIRD-PARTY"), &overlay, 0o644); - t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes())); - - // Generate the installer tarball - let mut cmd = rust_installer(build); - cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Rust-is-ready-to-roll.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - .arg("--non-installed-overlay").arg(&overlay) - .arg(format!("--package-name={}-{}", name, target)) - .arg("--component-name=cargo") - .arg("--legacy-manifest-dirs=rustlib,cargo"); - build.run(&mut cmd); } -pub fn rls(build: &Build, stage: u32, target: &str) { - assert!(build.config.extended); - println!("Dist RLS stage{} ({})", stage, target); - let compiler = Compiler::new(stage, &build.build); - - let src = build.src.join("src/tools/rls"); - let release_num = build.release_num("rls"); - let name = pkgname(build, "rls"); - let version = build.rls_info.version(build, &release_num); - - let tmp = tmpdir(build); - let image = tmp.join("rls-image"); - drop(fs::remove_dir_all(&image)); - t!(fs::create_dir_all(&image)); - - // Prepare the image directory - let rls = build.cargo_out(&compiler, Mode::Tool, target) - .join(exe("rls", target)); - install(&rls, &image.join("bin"), 0o755); - let doc = image.join("share/doc/rls"); - install(&src.join("README.md"), &doc, 0o644); - install(&src.join("LICENSE-MIT"), &doc, 0o644); - install(&src.join("LICENSE-APACHE"), &doc, 0o644); - - // Prepare the overlay - let overlay = tmp.join("rls-overlay"); - drop(fs::remove_dir_all(&overlay)); - t!(fs::create_dir_all(&overlay)); - install(&src.join("README.md"), &overlay, 0o644); - install(&src.join("LICENSE-MIT"), &overlay, 0o644); - install(&src.join("LICENSE-APACHE"), &overlay, 0o644); - t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes())); - - // Generate the installer tarball - let mut cmd = rust_installer(build); - cmd.arg("generate") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=RLS-ready-to-serve.") - .arg("--image-dir").arg(&image) - .arg("--work-dir").arg(&tmpdir(build)) - .arg("--output-dir").arg(&distdir(build)) - 
.arg("--non-installed-overlay").arg(&overlay) - .arg(format!("--package-name={}-{}", name, target)) - .arg("--component-name=rls") - .arg("--legacy-manifest-dirs=rustlib,cargo"); - build.run(&mut cmd); +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Rls { + pub stage: u32, + pub target: Interned, } -/// Creates a combined installer for the specified target in the provided stage. -pub fn extended(build: &Build, stage: u32, target: &str) { - println!("Dist extended stage{} ({})", stage, target); - - let dist = distdir(build); - let rustc_installer = dist.join(format!("{}-{}.tar.gz", - pkgname(build, "rustc"), - target)); - let cargo_installer = dist.join(format!("{}-{}.tar.gz", - pkgname(build, "cargo"), - target)); - let analysis_installer = dist.join(format!("{}-{}.tar.gz", - pkgname(build, "rust-analysis"), - target)); - let docs_installer = dist.join(format!("{}-{}.tar.gz", - pkgname(build, "rust-docs"), - target)); - let mingw_installer = dist.join(format!("{}-{}.tar.gz", - pkgname(build, "rust-mingw"), - target)); - let std_installer = dist.join(format!("{}-{}.tar.gz", - pkgname(build, "rust-std"), - target)); - - let tmp = tmpdir(build); - let overlay = tmp.join("extended-overlay"); - let etc = build.src.join("src/etc/installer"); - let work = tmp.join("work"); - - let _ = fs::remove_dir_all(&overlay); - install(&build.src.join("COPYRIGHT"), &overlay, 0o644); - install(&build.src.join("LICENSE-APACHE"), &overlay, 0o644); - install(&build.src.join("LICENSE-MIT"), &overlay, 0o644); - let version = build.rust_version(); - t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes())); - install(&etc.join("README.md"), &overlay, 0o644); - - // When rust-std package split from rustc, we needed to ensure that during - // upgrades rustc was upgraded before rust-std. To avoid rustc clobbering - // the std files during uninstall. To do this ensure that rustc comes - // before rust-std in the list below. - let mut tarballs = vec![rustc_installer, cargo_installer, - analysis_installer, docs_installer, std_installer]; - if target.contains("pc-windows-gnu") { - tarballs.push(mingw_installer); - } - let mut input_tarballs = tarballs[0].as_os_str().to_owned(); - for tarball in &tarballs[1..] 
{ - input_tarballs.push(","); - input_tarballs.push(tarball); +impl Step for Rls { + type Output = PathBuf; + const ONLY_BUILD_TARGETS: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("rls") } - let mut cmd = rust_installer(build); - cmd.arg("combine") - .arg("--product-name=Rust") - .arg("--rel-manifest-dir=rustlib") - .arg("--success-message=Rust-is-ready-to-roll.") - .arg("--work-dir").arg(&work) - .arg("--output-dir").arg(&distdir(build)) - .arg(format!("--package-name={}-{}", pkgname(build, "rust"), target)) - .arg("--legacy-manifest-dirs=rustlib,cargo") - .arg("--input-tarballs").arg(input_tarballs) - .arg("--non-installed-overlay").arg(&overlay); - build.run(&mut cmd); - - let mut license = String::new(); - t!(t!(File::open(build.src.join("COPYRIGHT"))).read_to_string(&mut license)); - license.push_str("\n"); - t!(t!(File::open(build.src.join("LICENSE-APACHE"))).read_to_string(&mut license)); - license.push_str("\n"); - t!(t!(File::open(build.src.join("LICENSE-MIT"))).read_to_string(&mut license)); - - let rtf = r"{\rtf1\ansi\deff0{\fonttbl{\f0\fnil\fcharset0 Arial;}}\nowwrap\fs18"; - let mut rtf = rtf.to_string(); - rtf.push_str("\n"); - for line in license.lines() { - rtf.push_str(line); - rtf.push_str("\\line "); + fn make_run(run: RunConfig) { + run.builder.ensure(Rls { + stage: run.builder.top_stage, + target: run.target, + }); } - rtf.push_str("}"); - - if target.contains("apple-darwin") { - let pkg = tmp.join("pkg"); - let _ = fs::remove_dir_all(&pkg); - t!(fs::create_dir_all(pkg.join("rustc"))); - t!(fs::create_dir_all(pkg.join("cargo"))); - t!(fs::create_dir_all(pkg.join("rust-docs"))); - t!(fs::create_dir_all(pkg.join("rust-std"))); - - cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target)), - &pkg.join("rustc")); - cp_r(&work.join(&format!("{}-{}", pkgname(build, "cargo"), target)), - &pkg.join("cargo")); - cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target)), - &pkg.join("rust-docs")); - cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target)), - &pkg.join("rust-std")); - - install(&etc.join("pkg/postinstall"), &pkg.join("rustc"), 0o755); - install(&etc.join("pkg/postinstall"), &pkg.join("cargo"), 0o755); - install(&etc.join("pkg/postinstall"), &pkg.join("rust-docs"), 0o755); - install(&etc.join("pkg/postinstall"), &pkg.join("rust-std"), 0o755); - - let pkgbuild = |component: &str| { - let mut cmd = Command::new("pkgbuild"); - cmd.arg("--identifier").arg(format!("org.rust-lang.{}", component)) - .arg("--scripts").arg(pkg.join(component)) - .arg("--nopayload") - .arg(pkg.join(component).with_extension("pkg")); - build.run(&mut cmd); - }; - pkgbuild("rustc"); - pkgbuild("cargo"); - pkgbuild("rust-docs"); - pkgbuild("rust-std"); - - // create an 'uninstall' package - install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755); - pkgbuild("uninstall"); - - t!(fs::create_dir_all(pkg.join("res"))); - t!(t!(File::create(pkg.join("res/LICENSE.txt"))).write_all(license.as_bytes())); - install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644); - let mut cmd = Command::new("productbuild"); - cmd.arg("--distribution").arg(etc.join("pkg/Distribution.xml")) - .arg("--resources").arg(pkg.join("res")) - .arg(distdir(build).join(format!("{}-{}.pkg", - pkgname(build, "rust"), - target))) - .arg("--package-path").arg(&pkg); + + fn run(self, builder: &Builder) -> PathBuf { + let build = builder.build; + let stage = self.stage; + let target = self.target; + 
assert!(build.config.extended); + + println!("Dist RLS stage{} ({})", stage, target); + let src = build.src.join("src/tools/rls"); + let release_num = build.release_num("rls"); + let name = pkgname(build, "rls"); + let version = build.rls_info.version(build, &release_num); + + let tmp = tmpdir(build); + let image = tmp.join("rls-image"); + drop(fs::remove_dir_all(&image)); + t!(fs::create_dir_all(&image)); + + // Prepare the image directory + let rls = builder.ensure(tool::Rls { + compiler: builder.compiler(stage, build.build), + target + }); + install(&rls, &image.join("bin"), 0o755); + let doc = image.join("share/doc/rls"); + install(&src.join("README.md"), &doc, 0o644); + install(&src.join("LICENSE-MIT"), &doc, 0o644); + install(&src.join("LICENSE-APACHE"), &doc, 0o644); + + // Prepare the overlay + let overlay = tmp.join("rls-overlay"); + drop(fs::remove_dir_all(&overlay)); + t!(fs::create_dir_all(&overlay)); + install(&src.join("README.md"), &overlay, 0o644); + install(&src.join("LICENSE-MIT"), &overlay, 0o644); + install(&src.join("LICENSE-APACHE"), &overlay, 0o644); + t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes())); + + // Generate the installer tarball + let mut cmd = rust_installer(builder); + cmd.arg("generate") + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=RLS-ready-to-serve.") + .arg("--image-dir").arg(&image) + .arg("--work-dir").arg(&tmpdir(build)) + .arg("--output-dir").arg(&distdir(build)) + .arg("--non-installed-overlay").arg(&overlay) + .arg(format!("--package-name={}-{}", name, target)) + .arg("--legacy-manifest-dirs=rustlib,cargo"); + + if build.config.channel == "nightly" { + cmd.arg("--component-name=rls"); + } else { + cmd.arg("--component-name=rls-preview"); + } + build.run(&mut cmd); + distdir(build).join(format!("{}-{}.tar.gz", name, target)) } +} - if target.contains("windows") { - let exe = tmp.join("exe"); - let _ = fs::remove_dir_all(&exe); - t!(fs::create_dir_all(exe.join("rustc"))); - t!(fs::create_dir_all(exe.join("cargo"))); - t!(fs::create_dir_all(exe.join("rust-docs"))); - t!(fs::create_dir_all(exe.join("rust-std"))); - cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target)) - .join("rustc"), - &exe.join("rustc")); - cp_r(&work.join(&format!("{}-{}", pkgname(build, "cargo"), target)) - .join("cargo"), - &exe.join("cargo")); - cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target)) - .join("rust-docs"), - &exe.join("rust-docs")); - cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target)) - .join(format!("rust-std-{}", target)), - &exe.join("rust-std")); - - t!(fs::remove_file(exe.join("rustc/manifest.in"))); - t!(fs::remove_file(exe.join("cargo/manifest.in"))); - t!(fs::remove_file(exe.join("rust-docs/manifest.in"))); - t!(fs::remove_file(exe.join("rust-std/manifest.in"))); - - if target.contains("windows-gnu") { - t!(fs::create_dir_all(exe.join("rust-mingw"))); - cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-mingw"), target)) - .join("rust-mingw"), - &exe.join("rust-mingw")); - t!(fs::remove_file(exe.join("rust-mingw/manifest.in"))); - } +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Extended { + stage: u32, + host: Interned, + target: Interned, +} + +impl Step for Extended { + type Output = (); + const DEFAULT: bool = true; + const ONLY_BUILD_TARGETS: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + 
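The Rls hunk above is the one place where the component name inside the tarball depends on the release channel, and the Extended step later mirrors that choice when it unpacks the RLS tarball for the Windows installer. The rule reduces to the following (rls_component_name is an illustrative name, not a function in this file):

// Sketch: component name chosen for the RLS tarball per channel.
fn rls_component_name(channel: &str) -> &'static str {
    if channel == "nightly" { "rls" } else { "rls-preview" }
}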
run.path("extended").default_condition(builder.config.extended) + } - install(&etc.join("exe/rust.iss"), &exe, 0o644); - install(&etc.join("exe/modpath.iss"), &exe, 0o644); - install(&etc.join("exe/upgrade.iss"), &exe, 0o644); - install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644); - t!(t!(File::create(exe.join("LICENSE.txt"))).write_all(license.as_bytes())); - - // Generate exe installer - let mut cmd = Command::new("iscc"); - cmd.arg("rust.iss") - .current_dir(&exe); - if target.contains("windows-gnu") { - cmd.arg("/dMINGW"); + fn make_run(run: RunConfig) { + run.builder.ensure(Extended { + stage: run.builder.top_stage, + host: run.host, + target: run.target, + }); + } + + /// Creates a combined installer for the specified target in the provided stage. + fn run(self, builder: &Builder) { + let build = builder.build; + let stage = self.stage; + let target = self.target; + + println!("Dist extended stage{} ({})", stage, target); + + let rustc_installer = builder.ensure(Rustc { + compiler: builder.compiler(stage, target), + }); + let cargo_installer = builder.ensure(Cargo { stage, target }); + let rls_installer = builder.ensure(Rls { stage, target }); + let mingw_installer = builder.ensure(Mingw { host: target }); + let analysis_installer = builder.ensure(Analysis { + compiler: builder.compiler(stage, self.host), + target + }); + + let docs_installer = builder.ensure(Docs { stage, host: target, }); + let std_installer = builder.ensure(Std { + compiler: builder.compiler(stage, self.host), + target, + }); + + let tmp = tmpdir(build); + let overlay = tmp.join("extended-overlay"); + let etc = build.src.join("src/etc/installer"); + let work = tmp.join("work"); + + let _ = fs::remove_dir_all(&overlay); + install(&build.src.join("COPYRIGHT"), &overlay, 0o644); + install(&build.src.join("LICENSE-APACHE"), &overlay, 0o644); + install(&build.src.join("LICENSE-MIT"), &overlay, 0o644); + let version = build.rust_version(); + t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes())); + install(&etc.join("README.md"), &overlay, 0o644); + + // When rust-std package split from rustc, we needed to ensure that during + // upgrades rustc was upgraded before rust-std. To avoid rustc clobbering + // the std files during uninstall. To do this ensure that rustc comes + // before rust-std in the list below. + let mut tarballs = vec![rustc_installer, cargo_installer, rls_installer, + analysis_installer, docs_installer, std_installer]; + if target.contains("pc-windows-gnu") { + tarballs.push(mingw_installer.unwrap()); + } + let mut input_tarballs = tarballs[0].as_os_str().to_owned(); + for tarball in &tarballs[1..] 
{ + input_tarballs.push(","); + input_tarballs.push(tarball); } - add_env(build, &mut cmd, target); + + let mut cmd = rust_installer(builder); + cmd.arg("combine") + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=Rust-is-ready-to-roll.") + .arg("--work-dir").arg(&work) + .arg("--output-dir").arg(&distdir(build)) + .arg(format!("--package-name={}-{}", pkgname(build, "rust"), target)) + .arg("--legacy-manifest-dirs=rustlib,cargo") + .arg("--input-tarballs").arg(input_tarballs) + .arg("--non-installed-overlay").arg(&overlay); build.run(&mut cmd); - install(&exe.join(format!("{}-{}.exe", pkgname(build, "rust"), target)), - &distdir(build), - 0o755); - // Generate msi installer - let wix = PathBuf::from(env::var_os("WIX").unwrap()); - let heat = wix.join("bin/heat.exe"); - let candle = wix.join("bin/candle.exe"); - let light = wix.join("bin/light.exe"); - - let heat_flags = ["-nologo", "-gg", "-sfrag", "-srd", "-sreg"]; - build.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rustc") - .args(&heat_flags) - .arg("-cg").arg("RustcGroup") - .arg("-dr").arg("Rustc") - .arg("-var").arg("var.RustcDir") - .arg("-out").arg(exe.join("RustcGroup.wxs"))); - build.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rust-docs") - .args(&heat_flags) - .arg("-cg").arg("DocsGroup") - .arg("-dr").arg("Docs") - .arg("-var").arg("var.DocsDir") - .arg("-out").arg(exe.join("DocsGroup.wxs")) - .arg("-t").arg(etc.join("msi/squash-components.xsl"))); - build.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("cargo") - .args(&heat_flags) - .arg("-cg").arg("CargoGroup") - .arg("-dr").arg("Cargo") - .arg("-var").arg("var.CargoDir") - .arg("-out").arg(exe.join("CargoGroup.wxs")) - .arg("-t").arg(etc.join("msi/remove-duplicates.xsl"))); - build.run(Command::new(&heat) - .current_dir(&exe) - .arg("dir") - .arg("rust-std") - .args(&heat_flags) - .arg("-cg").arg("StdGroup") - .arg("-dr").arg("Std") - .arg("-var").arg("var.StdDir") - .arg("-out").arg(exe.join("StdGroup.wxs"))); - if target.contains("windows-gnu") { + let mut license = String::new(); + t!(t!(File::open(build.src.join("COPYRIGHT"))).read_to_string(&mut license)); + license.push_str("\n"); + t!(t!(File::open(build.src.join("LICENSE-APACHE"))).read_to_string(&mut license)); + license.push_str("\n"); + t!(t!(File::open(build.src.join("LICENSE-MIT"))).read_to_string(&mut license)); + + let rtf = r"{\rtf1\ansi\deff0{\fonttbl{\f0\fnil\fcharset0 Arial;}}\nowwrap\fs18"; + let mut rtf = rtf.to_string(); + rtf.push_str("\n"); + for line in license.lines() { + rtf.push_str(line); + rtf.push_str("\\line "); + } + rtf.push_str("}"); + + if target.contains("apple-darwin") { + let pkg = tmp.join("pkg"); + let _ = fs::remove_dir_all(&pkg); + t!(fs::create_dir_all(pkg.join("rustc"))); + t!(fs::create_dir_all(pkg.join("cargo"))); + t!(fs::create_dir_all(pkg.join("rust-docs"))); + t!(fs::create_dir_all(pkg.join("rust-std"))); + t!(fs::create_dir_all(pkg.join("rls"))); + t!(fs::create_dir_all(pkg.join("rust-analysis"))); + + cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target)), + &pkg.join("rustc")); + cp_r(&work.join(&format!("{}-{}", pkgname(build, "cargo"), target)), + &pkg.join("cargo")); + cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target)), + &pkg.join("rust-docs")); + cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target)), + &pkg.join("rust-std")); + cp_r(&work.join(&format!("{}-{}", pkgname(build, "rls"), target)), + 
&pkg.join("rls")); + cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-analysis"), target)), + &pkg.join("rust-analysis")); + + install(&etc.join("pkg/postinstall"), &pkg.join("rustc"), 0o755); + install(&etc.join("pkg/postinstall"), &pkg.join("cargo"), 0o755); + install(&etc.join("pkg/postinstall"), &pkg.join("rust-docs"), 0o755); + install(&etc.join("pkg/postinstall"), &pkg.join("rust-std"), 0o755); + install(&etc.join("pkg/postinstall"), &pkg.join("rls"), 0o755); + install(&etc.join("pkg/postinstall"), &pkg.join("rust-analysis"), 0o755); + + let pkgbuild = |component: &str| { + let mut cmd = Command::new("pkgbuild"); + cmd.arg("--identifier").arg(format!("org.rust-lang.{}", component)) + .arg("--scripts").arg(pkg.join(component)) + .arg("--nopayload") + .arg(pkg.join(component).with_extension("pkg")); + build.run(&mut cmd); + }; + pkgbuild("rustc"); + pkgbuild("cargo"); + pkgbuild("rust-docs"); + pkgbuild("rust-std"); + pkgbuild("rls"); + pkgbuild("rust-analysis"); + + // create an 'uninstall' package + install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755); + pkgbuild("uninstall"); + + t!(fs::create_dir_all(pkg.join("res"))); + t!(t!(File::create(pkg.join("res/LICENSE.txt"))).write_all(license.as_bytes())); + install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644); + let mut cmd = Command::new("productbuild"); + cmd.arg("--distribution").arg(etc.join("pkg/Distribution.xml")) + .arg("--resources").arg(pkg.join("res")) + .arg(distdir(build).join(format!("{}-{}.pkg", + pkgname(build, "rust"), + target))) + .arg("--package-path").arg(&pkg); + build.run(&mut cmd); + } + + if target.contains("windows") { + let exe = tmp.join("exe"); + let _ = fs::remove_dir_all(&exe); + t!(fs::create_dir_all(exe.join("rustc"))); + t!(fs::create_dir_all(exe.join("cargo"))); + t!(fs::create_dir_all(exe.join("rls"))); + t!(fs::create_dir_all(exe.join("rust-analysis"))); + t!(fs::create_dir_all(exe.join("rust-docs"))); + t!(fs::create_dir_all(exe.join("rust-std"))); + cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target)) + .join("rustc"), + &exe.join("rustc")); + cp_r(&work.join(&format!("{}-{}", pkgname(build, "cargo"), target)) + .join("cargo"), + &exe.join("cargo")); + cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target)) + .join("rust-docs"), + &exe.join("rust-docs")); + cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target)) + .join(format!("rust-std-{}", target)), + &exe.join("rust-std")); + let rls_path = if build.config.channel == "nightly" { + work.join(&format!("{}-{}", pkgname(build, "rls"), target)).join("rls") + } else { + work.join(&format!("{}-{}", pkgname(build, "rls"), target)).join("rls-preview") + }; + cp_r(&rls_path, &exe.join("rls")); + cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-analysis"), target)) + .join(format!("rust-analysis-{}", target)), + &exe.join("rust-analysis")); + + t!(fs::remove_file(exe.join("rustc/manifest.in"))); + t!(fs::remove_file(exe.join("cargo/manifest.in"))); + t!(fs::remove_file(exe.join("rust-docs/manifest.in"))); + t!(fs::remove_file(exe.join("rust-std/manifest.in"))); + t!(fs::remove_file(exe.join("rls/manifest.in"))); + t!(fs::remove_file(exe.join("rust-analysis/manifest.in"))); + + if target.contains("windows-gnu") { + t!(fs::create_dir_all(exe.join("rust-mingw"))); + cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-mingw"), target)) + .join("rust-mingw"), + &exe.join("rust-mingw")); + t!(fs::remove_file(exe.join("rust-mingw/manifest.in"))); + } + + 
install(&etc.join("exe/rust.iss"), &exe, 0o644); + install(&etc.join("exe/modpath.iss"), &exe, 0o644); + install(&etc.join("exe/upgrade.iss"), &exe, 0o644); + install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644); + t!(t!(File::create(exe.join("LICENSE.txt"))).write_all(license.as_bytes())); + + // Generate exe installer + let mut cmd = Command::new("iscc"); + cmd.arg("rust.iss") + .current_dir(&exe); + if target.contains("windows-gnu") { + cmd.arg("/dMINGW"); + } + add_env(build, &mut cmd, target); + build.run(&mut cmd); + install(&exe.join(format!("{}-{}.exe", pkgname(build, "rust"), target)), + &distdir(build), + 0o755); + + // Generate msi installer + let wix = PathBuf::from(env::var_os("WIX").unwrap()); + let heat = wix.join("bin/heat.exe"); + let candle = wix.join("bin/candle.exe"); + let light = wix.join("bin/light.exe"); + + let heat_flags = ["-nologo", "-gg", "-sfrag", "-srd", "-sreg"]; build.run(Command::new(&heat) .current_dir(&exe) .arg("dir") - .arg("rust-mingw") + .arg("rustc") .args(&heat_flags) - .arg("-cg").arg("GccGroup") - .arg("-dr").arg("Gcc") - .arg("-var").arg("var.GccDir") - .arg("-out").arg(exe.join("GccGroup.wxs"))); - } + .arg("-cg").arg("RustcGroup") + .arg("-dr").arg("Rustc") + .arg("-var").arg("var.RustcDir") + .arg("-out").arg(exe.join("RustcGroup.wxs"))); + build.run(Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-docs") + .args(&heat_flags) + .arg("-cg").arg("DocsGroup") + .arg("-dr").arg("Docs") + .arg("-var").arg("var.DocsDir") + .arg("-out").arg(exe.join("DocsGroup.wxs")) + .arg("-t").arg(etc.join("msi/squash-components.xsl"))); + build.run(Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("cargo") + .args(&heat_flags) + .arg("-cg").arg("CargoGroup") + .arg("-dr").arg("Cargo") + .arg("-var").arg("var.CargoDir") + .arg("-out").arg(exe.join("CargoGroup.wxs")) + .arg("-t").arg(etc.join("msi/remove-duplicates.xsl"))); + build.run(Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-std") + .args(&heat_flags) + .arg("-cg").arg("StdGroup") + .arg("-dr").arg("Std") + .arg("-var").arg("var.StdDir") + .arg("-out").arg(exe.join("StdGroup.wxs"))); + build.run(Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rls") + .args(&heat_flags) + .arg("-cg").arg("RlsGroup") + .arg("-dr").arg("Rls") + .arg("-var").arg("var.RlsDir") + .arg("-out").arg(exe.join("RlsGroup.wxs")) + .arg("-t").arg(etc.join("msi/remove-duplicates.xsl"))); + build.run(Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-analysis") + .args(&heat_flags) + .arg("-cg").arg("AnalysisGroup") + .arg("-dr").arg("Analysis") + .arg("-var").arg("var.AnalysisDir") + .arg("-out").arg(exe.join("AnalysisGroup.wxs")) + .arg("-t").arg(etc.join("msi/remove-duplicates.xsl"))); + if target.contains("windows-gnu") { + build.run(Command::new(&heat) + .current_dir(&exe) + .arg("dir") + .arg("rust-mingw") + .args(&heat_flags) + .arg("-cg").arg("GccGroup") + .arg("-dr").arg("Gcc") + .arg("-var").arg("var.GccDir") + .arg("-out").arg(exe.join("GccGroup.wxs"))); + } - let candle = |input: &Path| { - let output = exe.join(input.file_stem().unwrap()) - .with_extension("wixobj"); - let arch = if target.contains("x86_64") {"x64"} else {"x86"}; - let mut cmd = Command::new(&candle); - cmd.current_dir(&exe) - .arg("-nologo") - .arg("-dRustcDir=rustc") - .arg("-dDocsDir=rust-docs") - .arg("-dCargoDir=cargo") - .arg("-dStdDir=rust-std") - .arg("-arch").arg(&arch) - .arg("-out").arg(&output) - .arg(&input); - add_env(build, &mut cmd, target); + let candle = 
|input: &Path| { + let output = exe.join(input.file_stem().unwrap()) + .with_extension("wixobj"); + let arch = if target.contains("x86_64") {"x64"} else {"x86"}; + let mut cmd = Command::new(&candle); + cmd.current_dir(&exe) + .arg("-nologo") + .arg("-dRustcDir=rustc") + .arg("-dDocsDir=rust-docs") + .arg("-dCargoDir=cargo") + .arg("-dStdDir=rust-std") + .arg("-dRlsDir=rls") + .arg("-dAnalysisDir=rust-analysis") + .arg("-arch").arg(&arch) + .arg("-out").arg(&output) + .arg(&input); + add_env(build, &mut cmd, target); + + if target.contains("windows-gnu") { + cmd.arg("-dGccDir=rust-mingw"); + } + build.run(&mut cmd); + }; + candle(&etc.join("msi/rust.wxs")); + candle(&etc.join("msi/ui.wxs")); + candle(&etc.join("msi/rustwelcomedlg.wxs")); + candle("RustcGroup.wxs".as_ref()); + candle("DocsGroup.wxs".as_ref()); + candle("CargoGroup.wxs".as_ref()); + candle("StdGroup.wxs".as_ref()); + candle("RlsGroup.wxs".as_ref()); + candle("AnalysisGroup.wxs".as_ref()); if target.contains("windows-gnu") { - cmd.arg("-dGccDir=rust-mingw"); + candle("GccGroup.wxs".as_ref()); } - build.run(&mut cmd); - }; - candle(&etc.join("msi/rust.wxs")); - candle(&etc.join("msi/ui.wxs")); - candle(&etc.join("msi/rustwelcomedlg.wxs")); - candle("RustcGroup.wxs".as_ref()); - candle("DocsGroup.wxs".as_ref()); - candle("CargoGroup.wxs".as_ref()); - candle("StdGroup.wxs".as_ref()); - - if target.contains("windows-gnu") { - candle("GccGroup.wxs".as_ref()); - } - t!(t!(File::create(exe.join("LICENSE.rtf"))).write_all(rtf.as_bytes())); - install(&etc.join("gfx/banner.bmp"), &exe, 0o644); - install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644); - - let filename = format!("{}-{}.msi", pkgname(build, "rust"), target); - let mut cmd = Command::new(&light); - cmd.arg("-nologo") - .arg("-ext").arg("WixUIExtension") - .arg("-ext").arg("WixUtilExtension") - .arg("-out").arg(exe.join(&filename)) - .arg("rust.wixobj") - .arg("ui.wixobj") - .arg("rustwelcomedlg.wixobj") - .arg("RustcGroup.wixobj") - .arg("DocsGroup.wixobj") - .arg("CargoGroup.wixobj") - .arg("StdGroup.wixobj") - .current_dir(&exe); - - if target.contains("windows-gnu") { - cmd.arg("GccGroup.wixobj"); - } - // ICE57 wrongly complains about the shortcuts - cmd.arg("-sice:ICE57"); + t!(t!(File::create(exe.join("LICENSE.rtf"))).write_all(rtf.as_bytes())); + install(&etc.join("gfx/banner.bmp"), &exe, 0o644); + install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644); + + let filename = format!("{}-{}.msi", pkgname(build, "rust"), target); + let mut cmd = Command::new(&light); + cmd.arg("-nologo") + .arg("-ext").arg("WixUIExtension") + .arg("-ext").arg("WixUtilExtension") + .arg("-out").arg(exe.join(&filename)) + .arg("rust.wixobj") + .arg("ui.wixobj") + .arg("rustwelcomedlg.wixobj") + .arg("RustcGroup.wixobj") + .arg("DocsGroup.wixobj") + .arg("CargoGroup.wixobj") + .arg("StdGroup.wixobj") + .arg("RlsGroup.wixobj") + .arg("AnalysisGroup.wixobj") + .current_dir(&exe); - build.run(&mut cmd); + if target.contains("windows-gnu") { + cmd.arg("GccGroup.wixobj"); + } + // ICE57 wrongly complains about the shortcuts + cmd.arg("-sice:ICE57"); + + build.run(&mut cmd); - t!(fs::rename(exe.join(&filename), distdir(build).join(&filename))); + t!(fs::rename(exe.join(&filename), distdir(build).join(&filename))); + } } } -fn add_env(build: &Build, cmd: &mut Command, target: &str) { +fn add_env(build: &Build, cmd: &mut Command, target: Interned) { let mut parts = channel::CFG_RELEASE_NUM.split('.'); cmd.env("CFG_RELEASE_INFO", build.rust_version()) .env("CFG_RELEASE_NUM", channel::CFG_RELEASE_NUM) @@ 
-1149,34 +1507,53 @@ fn add_env(build: &Build, cmd: &mut Command, target: &str) { } } -pub fn hash_and_sign(build: &Build) { - let compiler = Compiler::new(0, &build.build); - let mut cmd = build.tool_cmd(&compiler, "build-manifest"); - let sign = build.config.dist_sign_folder.as_ref().unwrap_or_else(|| { - panic!("\n\nfailed to specify `dist.sign-folder` in `config.toml`\n\n") - }); - let addr = build.config.dist_upload_addr.as_ref().unwrap_or_else(|| { - panic!("\n\nfailed to specify `dist.upload-addr` in `config.toml`\n\n") - }); - let file = build.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| { - panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n") - }); - let mut pass = String::new(); - t!(t!(File::open(&file)).read_to_string(&mut pass)); - - let today = output(Command::new("date").arg("+%Y-%m-%d")); - - cmd.arg(sign); - cmd.arg(distdir(build)); - cmd.arg(today.trim()); - cmd.arg(build.rust_package_vers()); - cmd.arg(build.package_vers(&build.release_num("cargo"))); - cmd.arg(addr); - - t!(fs::create_dir_all(distdir(build))); - - let mut child = t!(cmd.stdin(Stdio::piped()).spawn()); - t!(child.stdin.take().unwrap().write_all(pass.as_bytes())); - let status = t!(child.wait()); - assert!(status.success()); +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct HashSign; + +impl Step for HashSign { + type Output = (); + const ONLY_BUILD_TARGETS: bool = true; + const ONLY_HOSTS: bool = true; + const ONLY_BUILD: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("hash-and-sign") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(HashSign); + } + + fn run(self, builder: &Builder) { + let build = builder.build; + let mut cmd = builder.tool_cmd(Tool::BuildManifest); + let sign = build.config.dist_sign_folder.as_ref().unwrap_or_else(|| { + panic!("\n\nfailed to specify `dist.sign-folder` in `config.toml`\n\n") + }); + let addr = build.config.dist_upload_addr.as_ref().unwrap_or_else(|| { + panic!("\n\nfailed to specify `dist.upload-addr` in `config.toml`\n\n") + }); + let file = build.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| { + panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n") + }); + let mut pass = String::new(); + t!(t!(File::open(&file)).read_to_string(&mut pass)); + + let today = output(Command::new("date").arg("+%Y-%m-%d")); + + cmd.arg(sign); + cmd.arg(distdir(build)); + cmd.arg(today.trim()); + cmd.arg(build.rust_package_vers()); + cmd.arg(build.package_vers(&build.release_num("cargo"))); + cmd.arg(build.package_vers(&build.release_num("rls"))); + cmd.arg(addr); + + t!(fs::create_dir_all(distdir(build))); + + let mut child = t!(cmd.stdin(Stdio::piped()).spawn()); + t!(child.stdin.take().unwrap().write_all(pass.as_bytes())); + let status = t!(child.wait()); + assert!(status.success()); + } } diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs index 7dbc3e5553..86f5346bea 100644 --- a/src/bootstrap/doc.rs +++ b/src/bootstrap/doc.rs @@ -20,86 +20,276 @@ use std::fs::{self, File}; use std::io::prelude::*; use std::io; -use std::path::Path; -use std::process::Command; +use std::path::{PathBuf, Path}; -use {Build, Compiler, Mode}; -use util::{cp_r, symlink_dir}; +use Mode; use build_helper::up_to_date; -/// Invoke `rustbook` for `target` for the doc book `name`. -/// -/// This will not actually generate any documentation if the documentation has -/// already been generated. 
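The HashSign step above deliberately feeds the GPG password to the build-manifest tool over a piped stdin rather than putting the secret on the command line. A minimal sketch of that spawn-and-write pattern, with a placeholder command standing in for the tool:

    use std::io::Write;
    use std::process::{Command, Stdio};

    fn main() {
        // Placeholder secret and command; HashSign reads the password from the
        // file named by dist.gpg-password-file and runs the build-manifest tool.
        let pass = "not-a-real-password";
        let mut child = Command::new("cat")
            .stdin(Stdio::piped())
            .spawn()
            .expect("failed to spawn child");
        child.stdin.take().unwrap().write_all(pass.as_bytes()).unwrap();
        let status = child.wait().expect("failed to wait on child");
        assert!(status.success());
    }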
-pub fn rustbook(build: &Build, target: &str, name: &str) { - let src = build.src.join("src/doc"); - rustbook_src(build, target, name, &src); +use util::{cp_r, symlink_dir}; +use builder::{Builder, Compiler, RunConfig, ShouldRun, Step}; +use tool::Tool; +use compile; +use cache::{INTERNER, Interned}; + +macro_rules! book { + ($($name:ident, $path:expr, $book_name:expr;)+) => { + $( + #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + pub struct $name { + target: Interned, + } + + impl Step for $name { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.path($path).default_condition(builder.build.config.docs) + } + + fn make_run(run: RunConfig) { + run.builder.ensure($name { + target: run.target, + }); + } + + fn run(self, builder: &Builder) { + builder.ensure(Rustbook { + target: self.target, + name: INTERNER.intern_str($book_name), + }) + } + } + )+ + } } -/// Invoke `rustbook` for `target` for the doc book `name` from the `src` path. -/// -/// This will not actually generate any documentation if the documentation has -/// already been generated. -pub fn rustbook_src(build: &Build, target: &str, name: &str, src: &Path) { - let out = build.doc_out(target); - t!(fs::create_dir_all(&out)); - - let out = out.join(name); - let compiler = Compiler::new(0, &build.build); - let src = src.join(name); - let index = out.join("index.html"); - let rustbook = build.tool(&compiler, "rustbook"); - if up_to_date(&src, &index) && up_to_date(&rustbook, &index) { - return - } - println!("Rustbook ({}) - {}", target, name); - let _ = fs::remove_dir_all(&out); - build.run(build.tool_cmd(&compiler, "rustbook") - .arg("build") - .arg(&src) - .arg("-d") - .arg(out)); +book!( + Nomicon, "src/doc/book", "nomicon"; + Reference, "src/doc/reference", "reference"; + Rustdoc, "src/doc/rustdoc", "rustdoc"; +); + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +struct Rustbook { + target: Interned, + name: Interned, } -/// Build the book and associated stuff. -/// -/// We need to build: -/// -/// * Book (first edition) -/// * Book (second edition) -/// * Index page -/// * Redirect pages -pub fn book(build: &Build, target: &str, name: &str) { - // build book first edition - rustbook(build, target, &format!("{}/first-edition", name)); - - // build book second edition - rustbook(build, target, &format!("{}/second-edition", name)); - - // build the index page - let index = format!("{}/index.md", name); - println!("Documenting book index ({})", target); - invoke_rustdoc(build, target, &index); - - // build the redirect pages - println!("Documenting book redirect pages ({})", target); - for file in t!(fs::read_dir(build.src.join("src/doc/book/redirects"))) { - let file = t!(file); - let path = file.path(); - let path = path.to_str().unwrap(); - - invoke_rustdoc(build, target, path); +impl Step for Rustbook { + type Output = (); + + // rustbook is never directly called, and only serves as a shim for the nomicon and the + // reference. + fn should_run(run: ShouldRun) -> ShouldRun { + run.never() + } + + /// Invoke `rustbook` for `target` for the doc book `name`. + /// + /// This will not actually generate any documentation if the documentation has + /// already been generated. 
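The book! macro above stamps out one Step type per documentation book (nomicon, reference, rustdoc) so they all share the same rustbook-driven build logic. A simplified, self-contained version of the same pattern; the trait and its method are stand-ins, not the real builder types:

    trait DocStep {
        const PATH: &'static str;
        const BOOK: &'static str;

        fn run() {
            // stand-in for builder.ensure(RustbookSrc { .. })
            println!("rustbook build {} (src: {})", Self::BOOK, Self::PATH);
        }
    }

    macro_rules! book {
        ($($name:ident, $path:expr, $book_name:expr;)+) => {
            $(
                struct $name;

                impl DocStep for $name {
                    const PATH: &'static str = $path;
                    const BOOK: &'static str = $book_name;
                }
            )+
        }
    }

    book!(
        Nomicon, "src/doc/book", "nomicon";
        Reference, "src/doc/reference", "reference";
    );

    fn main() {
        Nomicon::run();
        Reference::run();
    }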
+ fn run(self, builder: &Builder) { + let src = builder.build.src.join("src/doc"); + builder.ensure(RustbookSrc { + target: self.target, + name: self.name, + src: INTERNER.intern_path(src), + }); } } -fn invoke_rustdoc(build: &Build, target: &str, markdown: &str) { - let out = build.doc_out(target); +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct UnstableBook { + target: Interned, +} - let compiler = Compiler::new(0, &build.build); +impl Step for UnstableBook { + type Output = (); + const DEFAULT: bool = true; - let path = build.src.join("src/doc").join(markdown); + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.path("src/doc/unstable-book").default_condition(builder.build.config.docs) + } + + fn make_run(run: RunConfig) { + run.builder.ensure(UnstableBook { + target: run.target, + }); + } + + fn run(self, builder: &Builder) { + builder.ensure(UnstableBookGen { + target: self.target, + }); + builder.ensure(RustbookSrc { + target: self.target, + name: INTERNER.intern_str("unstable-book"), + src: builder.build.md_doc_out(self.target), + }) + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +struct RustbookSrc { + target: Interned, + name: Interned, + src: Interned, +} + +impl Step for RustbookSrc { + type Output = (); + + fn should_run(run: ShouldRun) -> ShouldRun { + run.never() + } + + /// Invoke `rustbook` for `target` for the doc book `name` from the `src` path. + /// + /// This will not actually generate any documentation if the documentation has + /// already been generated. + fn run(self, builder: &Builder) { + let build = builder.build; + let target = self.target; + let name = self.name; + let src = self.src; + let out = build.doc_out(target); + t!(fs::create_dir_all(&out)); + + let out = out.join(name); + let src = src.join(name); + let index = out.join("index.html"); + let rustbook = builder.tool_exe(Tool::Rustbook); + if up_to_date(&src, &index) && up_to_date(&rustbook, &index) { + return + } + println!("Rustbook ({}) - {}", target, name); + let _ = fs::remove_dir_all(&out); + build.run(builder.tool_cmd(Tool::Rustbook) + .arg("build") + .arg(&src) + .arg("-d") + .arg(out)); + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct TheBook { + compiler: Compiler, + target: Interned, + name: &'static str, +} - let rustdoc = build.rustdoc(&compiler); +impl Step for TheBook { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.path("src/doc/book").default_condition(builder.build.config.docs) + } + + fn make_run(run: RunConfig) { + run.builder.ensure(TheBook { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build), + target: run.target, + name: "book", + }); + } + + /// Build the book and associated stuff. 
+ /// + /// We need to build: + /// + /// * Book (first edition) + /// * Book (second edition) + /// * Index page + /// * Redirect pages + fn run(self, builder: &Builder) { + let build = builder.build; + let target = self.target; + let name = self.name; + // build book first edition + builder.ensure(Rustbook { + target, + name: INTERNER.intern_string(format!("{}/first-edition", name)), + }); + + // build book second edition + builder.ensure(Rustbook { + target, + name: INTERNER.intern_string(format!("{}/second-edition", name)), + }); + + // build the index page + let index = format!("{}/index.md", name); + println!("Documenting book index ({})", target); + invoke_rustdoc(builder, self.compiler, target, &index); + + // build the redirect pages + println!("Documenting book redirect pages ({})", target); + for file in t!(fs::read_dir(build.src.join("src/doc/book/redirects"))) { + let file = t!(file); + let path = file.path(); + let path = path.to_str().unwrap(); + + invoke_rustdoc(builder, self.compiler, target, path); + } + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct CargoBook { + target: Interned, +} + +impl Step for CargoBook { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.path("src/doc/cargo").default_condition(builder.build.config.docs) + } + + fn make_run(run: RunConfig) { + run.builder.ensure(CargoBook { + target: run.target, + }); + } + + /// Create a placeholder for the cargo documentation so that doc.rust-lang.org/cargo will + /// redirect to doc.crates.io. We want to publish doc.rust-lang.org/cargo in the paper + /// version of the book, but we don't want to rush the process of switching cargo's docs + /// over to mdbook and deploying them. When the cargo book is ready, this implementation + /// should build the mdbook instead of this redirect page. + fn run(self, builder: &Builder) { + let build = builder.build; + let out = build.doc_out(self.target); + + let cargo_dir = out.join("cargo"); + t!(fs::create_dir_all(&cargo_dir)); + + let index = cargo_dir.join("index.html"); + let redirect_html = r#" + + + + + "#; + + println!("Creating cargo book redirect page"); + t!(t!(File::create(&index)).write_all(redirect_html.as_bytes())); + } +} + +fn invoke_rustdoc(builder: &Builder, compiler: Compiler, target: Interned, markdown: &str) { + let build = builder.build; + let out = build.doc_out(target); + + let path = build.src.join("src/doc").join(markdown); let favicon = build.src.join("src/doc/favicon.inc"); let footer = build.src.join("src/doc/footer.inc"); @@ -116,9 +306,7 @@ fn invoke_rustdoc(build: &Build, target: &str, markdown: &str) { t!(t!(File::create(&version_info)).write_all(info.as_bytes())); } - let mut cmd = Command::new(&rustdoc); - - build.add_rustc_lib_path(&compiler, &mut cmd); + let mut cmd = builder.rustdoc_cmd(compiler.host); let out = out.join("book"); @@ -137,242 +325,409 @@ fn invoke_rustdoc(build: &Build, target: &str, markdown: &str) { build.run(&mut cmd); } -/// Generates all standalone documentation as compiled by the rustdoc in `stage` -/// for the `target` into `out`. -/// -/// This will list all of `src/doc` looking for markdown files and appropriately -/// perform transformations like substituting `VERSION`, `SHORT_HASH`, and -/// `STAMP` alongw ith providing the various header/footer HTML we've cutomized. -/// -/// In the end, this is just a glorified wrapper around rustdoc! 
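The raw string assigned to redirect_html in CargoBook::run above appears to have lost its HTML markup in this copy of the patch. Going by the surrounding comment, the generated page is presumably a minimal zero-delay meta refresh to doc.crates.io, along these lines (a reconstruction, not the verbatim upstream literal):

    // Plausible reconstruction of the stripped literal; the upstream markup
    // may differ in details.
    const REDIRECT_HTML: &str = r#"
    <html>
      <head>
        <meta http-equiv="refresh" content="0; url=https://doc.crates.io">
      </head>
    </html>
    "#;

    fn main() {
        println!("{}", REDIRECT_HTML);
    }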
-pub fn standalone(build: &Build, target: &str) { - println!("Documenting standalone ({})", target); - let out = build.doc_out(target); - t!(fs::create_dir_all(&out)); - - let compiler = Compiler::new(0, &build.build); +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Standalone { + compiler: Compiler, + target: Interned, +} - let favicon = build.src.join("src/doc/favicon.inc"); - let footer = build.src.join("src/doc/footer.inc"); - let full_toc = build.src.join("src/doc/full-toc.inc"); - t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css"))); +impl Step for Standalone { + type Output = (); + const DEFAULT: bool = true; - let version_input = build.src.join("src/doc/version_info.html.template"); - let version_info = out.join("version_info.html"); + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.path("src/doc").default_condition(builder.build.config.docs) + } - if !up_to_date(&version_input, &version_info) { - let mut info = String::new(); - t!(t!(File::open(&version_input)).read_to_string(&mut info)); - let info = info.replace("VERSION", &build.rust_release()) - .replace("SHORT_HASH", build.rust_info.sha_short().unwrap_or("")) - .replace("STAMP", build.rust_info.sha().unwrap_or("")); - t!(t!(File::create(&version_info)).write_all(info.as_bytes())); + fn make_run(run: RunConfig) { + run.builder.ensure(Standalone { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build), + target: run.target, + }); } - for file in t!(fs::read_dir(build.src.join("src/doc"))) { - let file = t!(file); - let path = file.path(); - let filename = path.file_name().unwrap().to_str().unwrap(); - if !filename.ends_with(".md") || filename == "README.md" { - continue + /// Generates all standalone documentation as compiled by the rustdoc in `stage` + /// for the `target` into `out`. + /// + /// This will list all of `src/doc` looking for markdown files and appropriately + /// perform transformations like substituting `VERSION`, `SHORT_HASH`, and + /// `STAMP` along with providing the various header/footer HTML we've customized. + /// + /// In the end, this is just a glorified wrapper around rustdoc! 
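Several of these doc steps (RustbookSrc above, Standalone below) skip regeneration when the output is already newer than its inputs, via build_helper::up_to_date. A rough sketch of such a freshness check for single files; the real helper also walks directories and handles more cases:

    use std::fs;
    use std::path::Path;

    // Returns true when `dst` exists and is at least as new as `src`.
    fn up_to_date(src: &Path, dst: &Path) -> bool {
        let dst_time = match fs::metadata(dst).and_then(|m| m.modified()) {
            Ok(time) => time,
            Err(_) => return false, // missing output: must rebuild
        };
        match fs::metadata(src).and_then(|m| m.modified()) {
            Ok(src_time) => src_time <= dst_time,
            Err(_) => false, // can't read the input: be conservative
        }
    }

    fn main() {
        let fresh = up_to_date(Path::new("src/doc/index.md"), Path::new("doc/index.html"));
        println!("up to date: {}", fresh);
    }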
+ fn run(self, builder: &Builder) { + let build = builder.build; + let target = self.target; + let compiler = self.compiler; + println!("Documenting standalone ({})", target); + let out = build.doc_out(target); + t!(fs::create_dir_all(&out)); + + let favicon = build.src.join("src/doc/favicon.inc"); + let footer = build.src.join("src/doc/footer.inc"); + let full_toc = build.src.join("src/doc/full-toc.inc"); + t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css"))); + + let version_input = build.src.join("src/doc/version_info.html.template"); + let version_info = out.join("version_info.html"); + + if !up_to_date(&version_input, &version_info) { + let mut info = String::new(); + t!(t!(File::open(&version_input)).read_to_string(&mut info)); + let info = info.replace("VERSION", &build.rust_release()) + .replace("SHORT_HASH", build.rust_info.sha_short().unwrap_or("")) + .replace("STAMP", build.rust_info.sha().unwrap_or("")); + t!(t!(File::create(&version_info)).write_all(info.as_bytes())); } - let html = out.join(filename).with_extension("html"); - let rustdoc = build.rustdoc(&compiler); - if up_to_date(&path, &html) && - up_to_date(&footer, &html) && - up_to_date(&favicon, &html) && - up_to_date(&full_toc, &html) && - up_to_date(&version_info, &html) && - up_to_date(&rustdoc, &html) { - continue + for file in t!(fs::read_dir(build.src.join("src/doc"))) { + let file = t!(file); + let path = file.path(); + let filename = path.file_name().unwrap().to_str().unwrap(); + if !filename.ends_with(".md") || filename == "README.md" { + continue + } + + let html = out.join(filename).with_extension("html"); + let rustdoc = builder.rustdoc(compiler.host); + if up_to_date(&path, &html) && + up_to_date(&footer, &html) && + up_to_date(&favicon, &html) && + up_to_date(&full_toc, &html) && + up_to_date(&version_info, &html) && + up_to_date(&rustdoc, &html) { + continue + } + + let mut cmd = builder.rustdoc_cmd(compiler.host); + cmd.arg("--html-after-content").arg(&footer) + .arg("--html-before-content").arg(&version_info) + .arg("--html-in-header").arg(&favicon) + .arg("--markdown-playground-url") + .arg("https://play.rust-lang.org/") + .arg("-o").arg(&out) + .arg(&path); + + if filename == "not_found.md" { + cmd.arg("--markdown-no-toc") + .arg("--markdown-css") + .arg("https://doc.rust-lang.org/rust.css"); + } else { + cmd.arg("--markdown-css").arg("rust.css"); + } + build.run(&mut cmd); } + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Std { + stage: u32, + target: Interned, +} + +impl Step for Std { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.krate("std").default_condition(builder.build.config.docs) + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Std { + stage: run.builder.top_stage, + target: run.target + }); + } - let mut cmd = Command::new(&rustdoc); - build.add_rustc_lib_path(&compiler, &mut cmd); - cmd.arg("--html-after-content").arg(&footer) - .arg("--html-before-content").arg(&version_info) - .arg("--html-in-header").arg(&favicon) - .arg("--markdown-playground-url") - .arg("https://play.rust-lang.org/") - .arg("-o").arg(&out) - .arg(&path); - - if filename == "not_found.md" { - cmd.arg("--markdown-no-toc") - .arg("--markdown-css") - .arg("https://doc.rust-lang.org/rust.css"); + /// Compile all standard library documentation. + /// + /// This will generate all documentation for the standard library and its + /// dependencies. 
This is largely just a wrapper around `cargo doc`. + fn run(self, builder: &Builder) { + let build = builder.build; + let stage = self.stage; + let target = self.target; + println!("Documenting stage{} std ({})", stage, target); + let out = build.doc_out(target); + t!(fs::create_dir_all(&out)); + let compiler = builder.compiler(stage, build.build); + let rustdoc = builder.rustdoc(compiler.host); + let compiler = if build.force_use_stage1(compiler, target) { + builder.compiler(1, compiler.host) } else { - cmd.arg("--markdown-css").arg("rust.css"); + compiler + }; + + builder.ensure(compile::Std { compiler, target }); + let out_dir = build.stage_out(compiler, Mode::Libstd) + .join(target).join("doc"); + + // Here what we're doing is creating a *symlink* (directory junction on + // Windows) to the final output location. This is not done as an + // optimization but rather for correctness. We've got three trees of + // documentation, one for std, one for test, and one for rustc. It's then + // our job to merge them all together. + // + // Unfortunately rustbuild doesn't know nearly as well how to merge doc + // trees as rustdoc does itself, so instead of actually having three + // separate trees we just have rustdoc output to the same location across + // all of them. + // + // This way rustdoc generates output directly into the output, and rustdoc + // will also directly handle merging. + let my_out = build.crate_doc_out(target); + build.clear_if_dirty(&my_out, &rustdoc); + t!(symlink_dir_force(&my_out, &out_dir)); + + let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "doc"); + compile::std_cargo(build, &compiler, target, &mut cargo); + + // We don't want to build docs for internal std dependencies unless + // in compiler-docs mode. When not in that mode, we whitelist the crates + // for which docs must be built. + if !build.config.compiler_docs { + cargo.arg("--no-deps"); + for krate in &["alloc", "collections", "core", "std", "std_unicode"] { + cargo.arg("-p").arg(krate); + // Create all crate output directories first to make sure rustdoc uses + // relative links. + // FIXME: Cargo should probably do this itself. + t!(fs::create_dir_all(out_dir.join(krate))); + } } - build.run(&mut cmd); + + + build.run(&mut cargo); + cp_r(&my_out, &out); } } -/// Compile all standard library documentation. -/// -/// This will generate all documentation for the standard library and its -/// dependencies. This is largely just a wrapper around `cargo doc`. -pub fn std(build: &Build, stage: u32, target: &str) { - println!("Documenting stage{} std ({})", stage, target); - let out = build.doc_out(target); - t!(fs::create_dir_all(&out)); - let compiler = Compiler::new(stage, &build.build); - let compiler = if build.force_use_stage1(&compiler, target) { - Compiler::new(1, compiler.host) - } else { - compiler - }; - let out_dir = build.stage_out(&compiler, Mode::Libstd) - .join(target).join("doc"); - let rustdoc = build.rustdoc(&compiler); - - // Here what we're doing is creating a *symlink* (directory junction on - // Windows) to the final output location. This is not done as an - // optimization but rather for correctness. We've got three trees of - // documentation, one for std, one for test, and one for rustc. It's then - // our job to merge them all together. - // - // Unfortunately rustbuild doesn't know nearly as well how to merge doc - // trees as rustdoc does itself, so instead of actually having three - // separate trees we just have rustdoc output to the same location across - // all of them. 
- // - // This way rustdoc generates output directly into the output, and rustdoc - // will also directly handle merging. - let my_out = build.crate_doc_out(target); - build.clear_if_dirty(&my_out, &rustdoc); - t!(symlink_dir_force(&my_out, &out_dir)); - - let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc"); - cargo.arg("--manifest-path") - .arg(build.src.join("src/libstd/Cargo.toml")) - .arg("--features").arg(build.std_features()); - - // We don't want to build docs for internal std dependencies unless - // in compiler-docs mode. When not in that mode, we whitelist the crates - // for which docs must be built. - if !build.config.compiler_docs { - cargo.arg("--no-deps"); - for krate in &["alloc", "collections", "core", "std", "std_unicode"] { - cargo.arg("-p").arg(krate); - // Create all crate output directories first to make sure rustdoc uses - // relative links. - // FIXME: Cargo should probably do this itself. - t!(fs::create_dir_all(out_dir.join(krate))); - } +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Test { + stage: u32, + target: Interned, +} + +impl Step for Test { + type Output = (); + const DEFAULT: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.krate("test").default_condition(builder.config.compiler_docs) } + fn make_run(run: RunConfig) { + run.builder.ensure(Test { + stage: run.builder.top_stage, + target: run.target, + }); + } + + /// Compile all libtest documentation. + /// + /// This will generate all documentation for libtest and its dependencies. This + /// is largely just a wrapper around `cargo doc`. + fn run(self, builder: &Builder) { + let build = builder.build; + let stage = self.stage; + let target = self.target; + println!("Documenting stage{} test ({})", stage, target); + let out = build.doc_out(target); + t!(fs::create_dir_all(&out)); + let compiler = builder.compiler(stage, build.build); + let rustdoc = builder.rustdoc(compiler.host); + let compiler = if build.force_use_stage1(compiler, target) { + builder.compiler(1, compiler.host) + } else { + compiler + }; + + // Build libstd docs so that we generate relative links + builder.ensure(Std { stage, target }); - build.run(&mut cargo); - cp_r(&my_out, &out); + builder.ensure(compile::Test { compiler, target }); + let out_dir = build.stage_out(compiler, Mode::Libtest) + .join(target).join("doc"); + + // See docs in std above for why we symlink + let my_out = build.crate_doc_out(target); + build.clear_if_dirty(&my_out, &rustdoc); + t!(symlink_dir_force(&my_out, &out_dir)); + + let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "doc"); + compile::test_cargo(build, &compiler, target, &mut cargo); + build.run(&mut cargo); + cp_r(&my_out, &out); + } } -/// Compile all libtest documentation. -/// -/// This will generate all documentation for libtest and its dependencies. This -/// is largely just a wrapper around `cargo doc`. 
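The std, test and rustc doc steps all rely on the symlink trick described in the comment above so that rustdoc merges everything into a single doc tree. A Unix-only sketch of the forced directory symlink; rustbuild's own symlink_dir_force and util::symlink_dir additionally handle Windows directory junctions:

    use std::fs;
    use std::io;
    use std::os::unix::fs::symlink;
    use std::path::Path;

    // Replace whatever is at `dst` with a symlink pointing at `src`.
    fn symlink_dir_force(src: &Path, dst: &Path) -> io::Result<()> {
        if let Ok(metadata) = fs::symlink_metadata(dst) {
            if metadata.file_type().is_dir() {
                fs::remove_dir_all(dst)?;
            } else {
                // existing file or dangling symlink
                fs::remove_file(dst)?;
            }
        }
        symlink(src, dst)
    }

    fn main() -> io::Result<()> {
        // e.g. point the per-mode target/<triple>/doc dir at a shared output tree
        fs::create_dir_all("doc-shared")?;
        symlink_dir_force(Path::new("doc-shared"), Path::new("doc-link"))
    }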
-pub fn test(build: &Build, stage: u32, target: &str) { - println!("Documenting stage{} test ({})", stage, target); - let out = build.doc_out(target); - t!(fs::create_dir_all(&out)); - let compiler = Compiler::new(stage, &build.build); - let compiler = if build.force_use_stage1(&compiler, target) { - Compiler::new(1, compiler.host) - } else { - compiler - }; - let out_dir = build.stage_out(&compiler, Mode::Libtest) - .join(target).join("doc"); - let rustdoc = build.rustdoc(&compiler); - - // See docs in std above for why we symlink - let my_out = build.crate_doc_out(target); - build.clear_if_dirty(&my_out, &rustdoc); - t!(symlink_dir_force(&my_out, &out_dir)); - - let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc"); - cargo.arg("--manifest-path") - .arg(build.src.join("src/libtest/Cargo.toml")); - build.run(&mut cargo); - cp_r(&my_out, &out); +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Rustc { + stage: u32, + target: Interned, } -/// Generate all compiler documentation. -/// -/// This will generate all documentation for the compiler libraries and their -/// dependencies. This is largely just a wrapper around `cargo doc`. -pub fn rustc(build: &Build, stage: u32, target: &str) { - println!("Documenting stage{} compiler ({})", stage, target); - let out = build.doc_out(target); - t!(fs::create_dir_all(&out)); - let compiler = Compiler::new(stage, &build.build); - let compiler = if build.force_use_stage1(&compiler, target) { - Compiler::new(1, compiler.host) - } else { - compiler - }; - let out_dir = build.stage_out(&compiler, Mode::Librustc) - .join(target).join("doc"); - let rustdoc = build.rustdoc(&compiler); - - // See docs in std above for why we symlink - let my_out = build.crate_doc_out(target); - build.clear_if_dirty(&my_out, &rustdoc); - t!(symlink_dir_force(&my_out, &out_dir)); - - let mut cargo = build.cargo(&compiler, Mode::Librustc, target, "doc"); - cargo.arg("--manifest-path") - .arg(build.src.join("src/rustc/Cargo.toml")) - .arg("--features").arg(build.rustc_features()); - - if build.config.compiler_docs { - // src/rustc/Cargo.toml contains bin crates called rustc and rustdoc - // which would otherwise overwrite the docs for the real rustc and - // rustdoc lib crates. - cargo.arg("-p").arg("rustc_driver") - .arg("-p").arg("rustdoc"); - } else { - // Like with libstd above if compiler docs aren't enabled then we're not - // documenting internal dependencies, so we have a whitelist. - cargo.arg("--no-deps"); - for krate in &["proc_macro"] { - cargo.arg("-p").arg(krate); +impl Step for Rustc { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.krate("rustc-main").default_condition(builder.build.config.docs) + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Rustc { + stage: run.builder.top_stage, + target: run.target, + }); + } + + /// Generate all compiler documentation. + /// + /// This will generate all documentation for the compiler libraries and their + /// dependencies. This is largely just a wrapper around `cargo doc`. 
+ fn run(self, builder: &Builder) { + let build = builder.build; + let stage = self.stage; + let target = self.target; + println!("Documenting stage{} compiler ({})", stage, target); + let out = build.doc_out(target); + t!(fs::create_dir_all(&out)); + let compiler = builder.compiler(stage, build.build); + let rustdoc = builder.rustdoc(compiler.host); + let compiler = if build.force_use_stage1(compiler, target) { + builder.compiler(1, compiler.host) + } else { + compiler + }; + + // Build libstd docs so that we generate relative links + builder.ensure(Std { stage, target }); + + builder.ensure(compile::Rustc { compiler, target }); + let out_dir = build.stage_out(compiler, Mode::Librustc) + .join(target).join("doc"); + + // See docs in std above for why we symlink + let my_out = build.crate_doc_out(target); + build.clear_if_dirty(&my_out, &rustdoc); + t!(symlink_dir_force(&my_out, &out_dir)); + + let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "doc"); + compile::rustc_cargo(build, &compiler, target, &mut cargo); + + if build.config.compiler_docs { + // src/rustc/Cargo.toml contains bin crates called rustc and rustdoc + // which would otherwise overwrite the docs for the real rustc and + // rustdoc lib crates. + cargo.arg("-p").arg("rustc_driver") + .arg("-p").arg("rustdoc"); + } else { + // Like with libstd above if compiler docs aren't enabled then we're not + // documenting internal dependencies, so we have a whitelist. + cargo.arg("--no-deps"); + for krate in &["proc_macro"] { + cargo.arg("-p").arg(krate); + } } + + build.run(&mut cargo); + cp_r(&my_out, &out); } +} - build.run(&mut cargo); - cp_r(&my_out, &out); +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct ErrorIndex { + target: Interned, } -/// Generates the HTML rendered error-index by running the -/// `error_index_generator` tool. -pub fn error_index(build: &Build, target: &str) { - println!("Documenting error index ({})", target); - let out = build.doc_out(target); - t!(fs::create_dir_all(&out)); - let compiler = Compiler::new(0, &build.build); - let mut index = build.tool_cmd(&compiler, "error_index_generator"); - index.arg("html"); - index.arg(out.join("error-index.html")); +impl Step for ErrorIndex { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.path("src/tools/error_index_generator").default_condition(builder.build.config.docs) + } + + fn make_run(run: RunConfig) { + run.builder.ensure(ErrorIndex { + target: run.target, + }); + } + + /// Generates the HTML rendered error-index by running the + /// `error_index_generator` tool. 
+ fn run(self, builder: &Builder) { + let build = builder.build; + let target = self.target; - // FIXME: shouldn't have to pass this env var - index.env("CFG_BUILD", &build.build); + builder.ensure(compile::Rustc { + compiler: builder.compiler(0, build.build), + target, + }); - build.run(&mut index); + println!("Documenting error index ({})", target); + let out = build.doc_out(target); + t!(fs::create_dir_all(&out)); + let mut index = builder.tool_cmd(Tool::ErrorIndex); + index.arg("html"); + index.arg(out.join("error-index.html")); + + // FIXME: shouldn't have to pass this env var + index.env("CFG_BUILD", &build.build); + + build.run(&mut index); + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct UnstableBookGen { + target: Interned, } -pub fn unstable_book_gen(build: &Build, target: &str) { - println!("Generating unstable book md files ({})", target); - let out = build.md_doc_out(target).join("unstable-book"); - t!(fs::create_dir_all(&out)); - t!(fs::remove_dir_all(&out)); - let compiler = Compiler::new(0, &build.build); - let mut cmd = build.tool_cmd(&compiler, "unstable-book-gen"); - cmd.arg(build.src.join("src")); - cmd.arg(out); +impl Step for UnstableBookGen { + type Output = (); + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; - build.run(&mut cmd); + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.path("src/tools/unstable-book-gen").default_condition(builder.build.config.docs) + } + + fn make_run(run: RunConfig) { + run.builder.ensure(UnstableBookGen { + target: run.target, + }); + } + + fn run(self, builder: &Builder) { + let build = builder.build; + let target = self.target; + + builder.ensure(compile::Std { + compiler: builder.compiler(builder.top_stage, build.build), + target, + }); + + println!("Generating unstable book md files ({})", target); + let out = build.md_doc_out(target).join("unstable-book"); + t!(fs::create_dir_all(&out)); + t!(fs::remove_dir_all(&out)); + let mut cmd = builder.tool_cmd(Tool::UnstableBookGen); + cmd.arg(build.src.join("src")); + cmd.arg(out); + + build.run(&mut cmd); + } } fn symlink_dir_force(src: &Path, dst: &Path) -> io::Result<()> { diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs index 5804df34e8..a84d43d3de 100644 --- a/src/bootstrap/flags.rs +++ b/src/bootstrap/flags.rs @@ -23,7 +23,9 @@ use getopts::Options; use Build; use config::Config; use metadata; -use step; +use builder::Builder; + +use cache::{Interned, INTERNER}; /// Deserialized version of all flags for this compile. 
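The flags.rs changes below (and much of the new builder) replace plain Strings for build, host and target triples with Interned<String> handles handed out by a global INTERNER; the handles are cheap to copy, hash and compare. A simplified, hypothetical illustration of the interning idea (the real cache module is global and generic over the interned type):

    // Hand out small copyable handles for strings that are stored only once.
    #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
    struct Interned(usize);

    #[derive(Default)]
    struct Interner {
        strings: Vec<String>,
    }

    impl Interner {
        fn intern_str(&mut self, s: &str) -> Interned {
            if let Some(idx) = self.strings.iter().position(|existing| existing.as_str() == s) {
                return Interned(idx);
            }
            self.strings.push(s.to_string());
            Interned(self.strings.len() - 1)
        }

        fn get(&self, handle: Interned) -> &str {
            &self.strings[handle.0]
        }
    }

    fn main() {
        let mut interner = Interner::default();
        let a = interner.intern_str("x86_64-unknown-linux-gnu");
        let b = interner.intern_str("x86_64-unknown-linux-gnu");
        assert_eq!(a, b); // equal triples intern to the same handle
        println!("{}", interner.get(a));
    }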
pub struct Flags { @@ -31,9 +33,10 @@ pub struct Flags { pub on_fail: Option, pub stage: Option, pub keep_stage: Option, - pub build: String, - pub host: Vec, - pub target: Vec, + pub build: Option>, + + pub host: Vec>, + pub target: Vec>, pub config: Option, pub src: PathBuf, pub jobs: Option, @@ -66,6 +69,14 @@ pub enum Subcommand { }, } +impl Default for Subcommand { + fn default() -> Subcommand { + Subcommand::Build { + paths: vec![PathBuf::from("nowhere")], + } + } +} + impl Flags { pub fn parse(args: &[String]) -> Flags { let mut extra_help = String::new(); @@ -241,15 +252,12 @@ Arguments: // All subcommands can have an optional "Available paths" section if matches.opt_present("verbose") { - let flags = Flags::parse(&["build".to_string()]); - let mut config = Config::parse(&flags.build, cfg_file.clone()); - config.build = flags.build.clone(); - let mut build = Build::new(flags, config); + let config = Config::parse(&["build".to_string()]); + let mut build = Build::new(config); metadata::build(&mut build); - let maybe_rules_help = step::build_rules(&build).get_help(subcommand); - if maybe_rules_help.is_some() { - extra_help.push_str(maybe_rules_help.unwrap().as_str()); - } + + let maybe_rules_help = Builder::get_help(&build, subcommand.as_str()); + extra_help.push_str(maybe_rules_help.unwrap_or_default().as_str()); } else { extra_help.push_str(format!("Run `./x.py {} -h -v` to see a list of available paths.", subcommand).as_str()); @@ -266,14 +274,14 @@ Arguments: } "test" => { Subcommand::Test { - paths: paths, + paths, test_args: matches.opt_strs("test-args"), fail_fast: !matches.opt_present("no-fail-fast"), } } "bench" => { Subcommand::Bench { - paths: paths, + paths, test_args: matches.opt_strs("test-args"), } } @@ -289,12 +297,12 @@ Arguments: } "dist" => { Subcommand::Dist { - paths: paths, + paths, } } "install" => { Subcommand::Install { - paths: paths, + paths, } } _ => { @@ -316,18 +324,18 @@ Arguments: Flags { verbose: matches.opt_count("verbose"), - stage: stage, + stage, on_fail: matches.opt_str("on-fail"), keep_stage: matches.opt_str("keep-stage").map(|j| j.parse().unwrap()), - build: matches.opt_str("build").unwrap_or_else(|| { - env::var("BUILD").unwrap() - }), - host: split(matches.opt_strs("host")), - target: split(matches.opt_strs("target")), + build: matches.opt_str("build").map(|s| INTERNER.intern_string(s)), + host: split(matches.opt_strs("host")) + .into_iter().map(|x| INTERNER.intern_string(x)).collect::>(), + target: split(matches.opt_strs("target")) + .into_iter().map(|x| INTERNER.intern_string(x)).collect::>(), config: cfg_file, - src: src, + src, jobs: matches.opt_str("jobs").map(|j| j.parse().unwrap()), - cmd: cmd, + cmd, incremental: matches.opt_present("incremental"), } } diff --git a/src/bootstrap/install.rs b/src/bootstrap/install.rs index 8e2ef527b1..608924c9c2 100644 --- a/src/bootstrap/install.rs +++ b/src/bootstrap/install.rs @@ -18,121 +18,99 @@ use std::fs; use std::path::{Path, PathBuf, Component}; use std::process::Command; -use Build; -use dist::{pkgname, sanitize_sh, tmpdir}; - -pub struct Installer<'a> { - build: &'a Build, - prefix: PathBuf, - sysconfdir: PathBuf, - docdir: PathBuf, - bindir: PathBuf, - libdir: PathBuf, - mandir: PathBuf, - empty_dir: PathBuf, -} +use dist::{self, pkgname, sanitize_sh, tmpdir}; -impl<'a> Drop for Installer<'a> { - fn drop(&mut self) { - t!(fs::remove_dir_all(&self.empty_dir)); - } -} +use builder::{Builder, RunConfig, ShouldRun, Step}; +use cache::Interned; -impl<'a> Installer<'a> { - pub fn new(build: &'a 
Build) -> Installer<'a> { - let prefix_default = PathBuf::from("/usr/local"); - let sysconfdir_default = PathBuf::from("/etc"); - let docdir_default = PathBuf::from("share/doc/rust"); - let bindir_default = PathBuf::from("bin"); - let libdir_default = PathBuf::from("lib"); - let mandir_default = PathBuf::from("share/man"); - let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default); - let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default); - let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default); - let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default); - let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default); - let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default); - - let sysconfdir = prefix.join(sysconfdir); - let docdir = prefix.join(docdir); - let bindir = prefix.join(bindir); - let libdir = prefix.join(libdir); - let mandir = prefix.join(mandir); - - let destdir = env::var_os("DESTDIR").map(PathBuf::from); - - let prefix = add_destdir(&prefix, &destdir); - let sysconfdir = add_destdir(&sysconfdir, &destdir); - let docdir = add_destdir(&docdir, &destdir); - let bindir = add_destdir(&bindir, &destdir); - let libdir = add_destdir(&libdir, &destdir); - let mandir = add_destdir(&mandir, &destdir); - - let empty_dir = build.out.join("tmp/empty_dir"); - - t!(fs::create_dir_all(&empty_dir)); - - Installer { - build, - prefix, - sysconfdir, - docdir, - bindir, - libdir, - mandir, - empty_dir, - } - } +pub fn install_docs(builder: &Builder, stage: u32, host: Interned) { + install_sh(builder, "docs", "rust-docs", stage, Some(host)); +} - pub fn install_docs(&self, stage: u32, host: &str) { - self.install_sh("docs", "rust-docs", stage, Some(host)); +pub fn install_std(builder: &Builder, stage: u32) { + for target in &builder.build.targets { + install_sh(builder, "std", "rust-std", stage, Some(*target)); } +} - pub fn install_std(&self, stage: u32) { - for target in self.build.config.target.iter() { - self.install_sh("std", "rust-std", stage, Some(target)); - } - } +pub fn install_cargo(builder: &Builder, stage: u32, host: Interned) { + install_sh(builder, "cargo", "cargo", stage, Some(host)); +} - pub fn install_cargo(&self, stage: u32, host: &str) { - self.install_sh("cargo", "cargo", stage, Some(host)); - } +pub fn install_rls(builder: &Builder, stage: u32, host: Interned) { + install_sh(builder, "rls", "rls", stage, Some(host)); +} - pub fn install_rls(&self, stage: u32, host: &str) { - self.install_sh("rls", "rls", stage, Some(host)); - } +pub fn install_analysis(builder: &Builder, stage: u32, host: Interned) { + install_sh(builder, "analysis", "rust-analysis", stage, Some(host)); +} - pub fn install_analysis(&self, stage: u32, host: &str) { - self.install_sh("analysis", "rust-analysis", stage, Some(host)); - } +pub fn install_src(builder: &Builder, stage: u32) { + install_sh(builder, "src", "rust-src", stage, None); +} +pub fn install_rustc(builder: &Builder, stage: u32, host: Interned) { + install_sh(builder, "rustc", "rustc", stage, Some(host)); +} - pub fn install_src(&self, stage: u32) { - self.install_sh("src", "rust-src", stage, None); - } - pub fn install_rustc(&self, stage: u32, host: &str) { - self.install_sh("rustc", "rustc", stage, Some(host)); - } +fn install_sh( + builder: &Builder, + package: &str, + name: &str, + stage: u32, + host: Option> +) { + let build = builder.build; + println!("Install {} stage{} ({:?})", package, stage, host); + + let prefix_default = 
PathBuf::from("/usr/local"); + let sysconfdir_default = PathBuf::from("/etc"); + let docdir_default = PathBuf::from("share/doc/rust"); + let bindir_default = PathBuf::from("bin"); + let libdir_default = PathBuf::from("lib"); + let mandir_default = PathBuf::from("share/man"); + let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default); + let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default); + let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default); + let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default); + let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default); + let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default); + + let sysconfdir = prefix.join(sysconfdir); + let docdir = prefix.join(docdir); + let bindir = prefix.join(bindir); + let libdir = prefix.join(libdir); + let mandir = prefix.join(mandir); + + let destdir = env::var_os("DESTDIR").map(PathBuf::from); + + let prefix = add_destdir(&prefix, &destdir); + let sysconfdir = add_destdir(&sysconfdir, &destdir); + let docdir = add_destdir(&docdir, &destdir); + let bindir = add_destdir(&bindir, &destdir); + let libdir = add_destdir(&libdir, &destdir); + let mandir = add_destdir(&mandir, &destdir); + + let empty_dir = build.out.join("tmp/empty_dir"); + + t!(fs::create_dir_all(&empty_dir)); + let package_name = if let Some(host) = host { + format!("{}-{}", pkgname(build, name), host) + } else { + pkgname(build, name) + }; - fn install_sh(&self, package: &str, name: &str, stage: u32, host: Option<&str>) { - println!("Install {} stage{} ({:?})", package, stage, host); - let package_name = if let Some(host) = host { - format!("{}-{}", pkgname(self.build, name), host) - } else { - pkgname(self.build, name) - }; - - let mut cmd = Command::new("sh"); - cmd.current_dir(&self.empty_dir) - .arg(sanitize_sh(&tmpdir(self.build).join(&package_name).join("install.sh"))) - .arg(format!("--prefix={}", sanitize_sh(&self.prefix))) - .arg(format!("--sysconfdir={}", sanitize_sh(&self.sysconfdir))) - .arg(format!("--docdir={}", sanitize_sh(&self.docdir))) - .arg(format!("--bindir={}", sanitize_sh(&self.bindir))) - .arg(format!("--libdir={}", sanitize_sh(&self.libdir))) - .arg(format!("--mandir={}", sanitize_sh(&self.mandir))) - .arg("--disable-ldconfig"); - self.build.run(&mut cmd); - } + let mut cmd = Command::new("sh"); + cmd.current_dir(&empty_dir) + .arg(sanitize_sh(&tmpdir(build).join(&package_name).join("install.sh"))) + .arg(format!("--prefix={}", sanitize_sh(&prefix))) + .arg(format!("--sysconfdir={}", sanitize_sh(&sysconfdir))) + .arg(format!("--docdir={}", sanitize_sh(&docdir))) + .arg(format!("--bindir={}", sanitize_sh(&bindir))) + .arg(format!("--libdir={}", sanitize_sh(&libdir))) + .arg(format!("--mandir={}", sanitize_sh(&mandir))) + .arg("--disable-ldconfig"); + build.run(&mut cmd); + t!(fs::remove_dir_all(&empty_dir)); } fn add_destdir(path: &Path, destdir: &Option) -> PathBuf { @@ -148,3 +126,84 @@ fn add_destdir(path: &Path, destdir: &Option) -> PathBuf { } ret } + +macro_rules! 
install { + (($sel:ident, $builder:ident, $_config:ident), + $($name:ident, + $path:expr, + $default_cond:expr, + only_hosts: $only_hosts:expr, + $run_item:block $(, $c:ident)*;)+) => { + $( + #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + pub struct $name { + pub stage: u32, + pub target: Interned, + pub host: Interned, + } + + impl Step for $name { + type Output = (); + const DEFAULT: bool = true; + const ONLY_BUILD_TARGETS: bool = true; + const ONLY_HOSTS: bool = $only_hosts; + $(const $c: bool = true;)* + + fn should_run(run: ShouldRun) -> ShouldRun { + let $_config = &run.builder.config; + run.path($path).default_condition($default_cond) + } + + fn make_run(run: RunConfig) { + run.builder.ensure($name { + stage: run.builder.top_stage, + target: run.target, + host: run.host, + }); + } + + fn run($sel, $builder: &Builder) { + $run_item + } + })+ + } +} + +install!((self, builder, _config), + Docs, "src/doc", _config.docs, only_hosts: false, { + builder.ensure(dist::Docs { stage: self.stage, host: self.target }); + install_docs(builder, self.stage, self.target); + }; + Std, "src/libstd", true, only_hosts: true, { + builder.ensure(dist::Std { + compiler: builder.compiler(self.stage, self.host), + target: self.target + }); + install_std(builder, self.stage); + }; + Cargo, "cargo", _config.extended, only_hosts: true, { + builder.ensure(dist::Cargo { stage: self.stage, target: self.target }); + install_cargo(builder, self.stage, self.target); + }; + Rls, "rls", _config.extended, only_hosts: true, { + builder.ensure(dist::Rls { stage: self.stage, target: self.target }); + install_rls(builder, self.stage, self.target); + }; + Analysis, "analysis", _config.extended, only_hosts: false, { + builder.ensure(dist::Analysis { + compiler: builder.compiler(self.stage, self.host), + target: self.target + }); + install_analysis(builder, self.stage, self.target); + }; + Src, "src", _config.extended, only_hosts: true, { + builder.ensure(dist::Src); + install_src(builder, self.stage); + }, ONLY_BUILD; + Rustc, "src/librustc", true, only_hosts: true, { + builder.ensure(dist::Rustc { + compiler: builder.compiler(self.stage, self.target), + }); + install_rustc(builder, self.stage, self.target); + }; +); diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index 032231582e..84a9e56b64 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -23,38 +23,87 @@ //! //! ## Architecture //! -//! Although this build system defers most of the complicated logic to Cargo -//! itself, it still needs to maintain a list of targets and dependencies which -//! it can itself perform. Rustbuild is made up of a list of rules with -//! dependencies amongst them (created in the `step` module) and then knows how -//! to execute each in sequence. Each time rustbuild is invoked, it will simply -//! iterate through this list of steps and execute each serially in turn. For -//! each step rustbuild relies on the step internally being incremental and +//! The build system defers most of the complicated logic managing invocations +//! of rustc and rustdoc to Cargo itself. However, moving through various stages +//! and copying artifacts is still necessary for it to do. Each time rustbuild +//! is invoked, it will iterate through the list of predefined steps and execute +//! each serially in turn if it matches the paths passed or is a default rule. +//! For each step rustbuild relies on the step internally being incremental and //! parallel. Note, though, that the `-j` parameter to rustbuild gets forwarded //! 
to appropriate test harnesses and such. //! //! Most of the "meaty" steps that matter are backed by Cargo, which does indeed //! have its own parallelism and incremental management. Later steps, like //! tests, aren't incremental and simply run the entire suite currently. +//! However, compiletest itself tries to avoid running tests when the artifacts +//! that are involved (mainly the compiler) haven't changed. //! //! When you execute `x.py build`, the steps which are executed are: //! //! * First, the python script is run. This will automatically download the -//! stage0 rustc and cargo according to `src/stage0.txt`, or using the cached +//! stage0 rustc and cargo according to `src/stage0.txt`, or use the cached //! versions if they're available. These are then used to compile rustbuild //! itself (using Cargo). Finally, control is then transferred to rustbuild. //! //! * Rustbuild takes over, performs sanity checks, probes the environment, -//! reads configuration, builds up a list of steps, and then starts executing -//! them. +//! reads configuration, and starts executing steps as it reads the command +//! line arguments (paths) or goes through the default rules. //! -//! * The stage0 libstd is compiled -//! * The stage0 libtest is compiled -//! * The stage0 librustc is compiled -//! * The stage1 compiler is assembled -//! * The stage1 libstd, libtest, librustc are compiled -//! * The stage2 compiler is assembled -//! * The stage2 libstd, libtest, librustc are compiled +//! The build output will be something like the following: +//! +//! Building stage0 std artifacts +//! Copying stage0 std +//! Building stage0 test artifacts +//! Copying stage0 test +//! Building stage0 compiler artifacts +//! Copying stage0 rustc +//! Assembling stage1 compiler +//! Building stage1 std artifacts +//! Copying stage1 std +//! Building stage1 test artifacts +//! Copying stage1 test +//! Building stage1 compiler artifacts +//! Copying stage1 rustc +//! Assembling stage2 compiler +//! Uplifting stage1 std +//! Uplifting stage1 test +//! Uplifting stage1 rustc +//! +//! Let's dissect that a little: +//! +//! ## Building stage0 {std,test,compiler} artifacts +//! +//! These steps use the provided (downloaded, usually) compiler to compile the +//! local Rust source into libraries we can use. +//! +//! ## Copying stage0 {std,test,rustc} +//! +//! This copies the build output from Cargo into +//! `build/$HOST/stage0-sysroot/lib/rustlib/$ARCH/lib`. FIXME: This step's +//! documentation should be expanded -- the information already here may be +//! incorrect. +//! +//! ## Assembling stage1 compiler +//! +//! This copies the libraries we built in "building stage0 ... artifacts" into +//! the stage1 compiler's lib directory. These are the host libraries that the +//! compiler itself uses to run. These aren't actually used by artifacts the new +//! compiler generates. This step also copies the rustc and rustdoc binaries we +//! generated into build/$HOST/stage/bin. +//! +//! The stage1/bin/rustc is a fully functional compiler, but it doesn't yet have +//! any libraries to link built binaries or libraries to. The next 3 steps will +//! provide those libraries for it; they are mostly equivalent to constructing +//! the stage1/bin compiler so we don't go through them individually. +//! +//! ## Uplifting stage1 {std,test,rustc} +//! +//! This step copies the libraries from the stage1 compiler sysroot into the +//! stage2 compiler. This is done to avoid rebuilding the compiler; libraries +//!
we'd build in this step should be identical (in function, if not necessarily +//! identical on disk) so there's no need to recompile the compiler again. Note +//! that if you want to, you can enable the full-bootstrap option to change this +//! behavior. //! //! Each step is driven by a separate Cargo project and rustbuild orchestrates //! copying files between steps and otherwise preparing for Cargo to run. @@ -65,33 +114,38 @@ //! also check out the `src/bootstrap/README.md` file for more information. #![deny(warnings)] +#![allow(stable_features)] +#![feature(associated_consts)] #[macro_use] extern crate build_helper; +#[macro_use] +extern crate serde_derive; +#[macro_use] +extern crate lazy_static; +extern crate serde_json; extern crate cmake; extern crate filetime; extern crate gcc; extern crate getopts; extern crate num_cpus; -extern crate rustc_serialize; extern crate toml; #[cfg(unix)] extern crate libc; use std::cell::Cell; -use std::cmp; -use std::collections::HashMap; +use std::collections::{HashSet, HashMap}; use std::env; -use std::ffi::OsString; use std::fs::{self, File}; use std::io::Read; use std::path::{PathBuf, Path}; use std::process::Command; +use std::slice; use build_helper::{run_silent, run_suppressed, try_run_silent, try_run_suppressed, output, mtime}; -use util::{exe, libdir, add_lib_path, OutputFolder, CiEnv}; +use util::{exe, libdir, OutputFolder, CiEnv}; mod cc; mod channel; @@ -106,8 +160,10 @@ mod flags; mod install; mod native; mod sanity; -mod step; pub mod util; +mod builder; +mod cache; +mod tool; #[cfg(windows)] mod job; @@ -130,7 +186,8 @@ mod job { } pub use config::Config; -pub use flags::{Flags, Subcommand}; +use flags::Subcommand; +use cache::{Interned, INTERNER}; /// A structure representing a Rust compiler. /// @@ -138,9 +195,9 @@ pub use flags::{Flags, Subcommand}; /// corresponds to the platform the compiler runs on. This structure is used as /// a parameter to many methods below. #[derive(Eq, PartialEq, Clone, Copy, Hash, Debug)] -pub struct Compiler<'a> { +pub struct Compiler { stage: u32, - host: &'a str, + host: Interned, } /// Global configuration for the build system. @@ -157,9 +214,6 @@ pub struct Build { // User-specified configuration via config.toml config: Config, - // User-specified configuration via CLI flags - flags: Flags, - // Derived properties from the above two configurations src: PathBuf, out: PathBuf, @@ -171,9 +225,9 @@ pub struct Build { verbosity: usize, // Targets for which to build. - build: String, - hosts: Vec, - targets: Vec, + build: Interned, + hosts: Vec>, + targets: Vec>, // Stage 0 (downloaded) compiler and cargo or their local rust equivalents. initial_rustc: PathBuf, @@ -185,10 +239,10 @@ pub struct Build { // Runtime state filled in later on // target -> (cc, ar) - cc: HashMap)>, + cc: HashMap, (gcc::Tool, Option)>, // host -> (cc, ar) - cxx: HashMap, - crates: HashMap, + cxx: HashMap, gcc::Tool>, + crates: HashMap, Crate>, is_sudo: bool, ci_env: CiEnv, delayed_failures: Cell, @@ -196,9 +250,9 @@ pub struct Build { #[derive(Debug)] struct Crate { - name: String, + name: Interned, version: String, - deps: Vec, + deps: Vec>, path: PathBuf, doc_step: String, build_step: String, @@ -210,7 +264,7 @@ struct Crate { /// /// These entries currently correspond to the various output directories of the /// build system, with each mod generating output in a different directory. 
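// The hunks above replace Build's owned `String` hosts, targets, and map keys
// with copyable `Interned` handles from the new `cache` module. A handle
// hashes and compares like a small integer, which keeps the derived
// `Copy`/`Hash`/`Eq` impls on steps cheap. A minimal, self-contained sketch of
// the idea (not the actual src/bootstrap/cache.rs, which is global,
// thread-safe, and also interns paths):
use std::collections::HashMap;

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct Handle(usize);

#[derive(Default)]
struct Interner {
    strings: Vec<String>,
    ids: HashMap<String, usize>,
}

impl Interner {
    fn intern(&mut self, s: &str) -> Handle {
        if let Some(&id) = self.ids.get(s) {
            return Handle(id);
        }
        let id = self.strings.len();
        self.strings.push(s.to_string());
        self.ids.insert(s.to_string(), id);
        Handle(id)
    }

    fn get(&self, handle: Handle) -> &str {
        &self.strings[handle.0]
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern("x86_64-unknown-linux-gnu");
    let b = interner.intern("x86_64-unknown-linux-gnu");
    assert_eq!(a, b); // interning the same triple twice yields the same handle
    println!("{}", interner.get(a));
}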
-#[derive(Clone, Copy, PartialEq, Eq)] +#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)] pub enum Mode { /// Build the standard library, placing output in the "stageN-std" directory. Libstd, @@ -230,9 +284,9 @@ impl Build { /// line and the filesystem `config`. /// /// By default all build output will be placed in the current directory. - pub fn new(flags: Flags, config: Config) -> Build { + pub fn new(config: Config) -> Build { let cwd = t!(env::current_dir()); - let src = flags.src.clone(); + let src = config.src.clone(); let out = cwd.join("build"); let is_sudo = match env::var_os("SUDO_USER") { @@ -244,64 +298,42 @@ impl Build { } None => false, }; - let rust_info = channel::GitInfo::new(&src); - let cargo_info = channel::GitInfo::new(&src.join("src/tools/cargo")); - let rls_info = channel::GitInfo::new(&src.join("src/tools/rls")); - - let hosts = if !flags.host.is_empty() { - for host in flags.host.iter() { - if !config.host.contains(host) { - panic!("specified host `{}` is not in configuration", host); - } - } - flags.host.clone() - } else { - config.host.clone() - }; - let targets = if !flags.target.is_empty() { - for target in flags.target.iter() { - if !config.target.contains(target) { - panic!("specified target `{}` is not in configuration", target); - } - } - flags.target.clone() - } else { - config.target.clone() - }; + let rust_info = channel::GitInfo::new(&config, &src); + let cargo_info = channel::GitInfo::new(&config, &src.join("src/tools/cargo")); + let rls_info = channel::GitInfo::new(&config, &src.join("src/tools/rls")); Build { initial_rustc: config.initial_rustc.clone(), initial_cargo: config.initial_cargo.clone(), local_rebuild: config.local_rebuild, - fail_fast: flags.cmd.fail_fast(), - verbosity: cmp::max(flags.verbose, config.verbose), + fail_fast: config.cmd.fail_fast(), + verbosity: config.verbose, - build: config.host[0].clone(), - hosts: hosts, - targets: targets, + build: config.build, + hosts: config.hosts.clone(), + targets: config.targets.clone(), - flags: flags, - config: config, - src: src, - out: out, + config, + src, + out, - rust_info: rust_info, - cargo_info: cargo_info, - rls_info: rls_info, + rust_info, + cargo_info, + rls_info, cc: HashMap::new(), cxx: HashMap::new(), crates: HashMap::new(), lldb_version: None, lldb_python_dir: None, - is_sudo: is_sudo, + is_sudo, ci_env: CiEnv::current(), delayed_failures: Cell::new(0), } } - fn build_slice(&self) -> &[String] { + pub fn build_triple(&self) -> &[Interned] { unsafe { - std::slice::from_raw_parts(&self.build, 1) + slice::from_raw_parts(&self.build, 1) } } @@ -311,7 +343,7 @@ impl Build { job::setup(self); } - if let Subcommand::Clean = self.flags.cmd { + if let Subcommand::Clean = self.config.cmd { return clean::clean(self); } @@ -333,7 +365,7 @@ impl Build { self.verbose("learning about cargo"); metadata::build(self); - step::run(self); + builder::Builder::run(&self); } /// Clear out `dir` if `input` is newer. @@ -351,242 +383,6 @@ impl Build { t!(File::create(stamp)); } - /// Prepares an invocation of `cargo` to be run. - /// - /// This will create a `Command` that represents a pending execution of - /// Cargo. This cargo will be configured to use `compiler` as the actual - /// rustc compiler, its output will be scoped by `mode`'s output directory, - /// it will pass the `--target` flag for the specified `target`, and will be - /// executing the Cargo command `cmd`. 
- fn cargo(&self, - compiler: &Compiler, - mode: Mode, - target: &str, - cmd: &str) -> Command { - let mut cargo = Command::new(&self.initial_cargo); - let out_dir = self.stage_out(compiler, mode); - cargo.env("CARGO_TARGET_DIR", out_dir) - .arg(cmd) - .arg("-j").arg(self.jobs().to_string()) - .arg("--target").arg(target); - - // FIXME: Temporary fix for https://github.com/rust-lang/cargo/issues/3005 - // Force cargo to output binaries with disambiguating hashes in the name - cargo.env("__CARGO_DEFAULT_LIB_METADATA", &self.config.channel); - - let stage; - if compiler.stage == 0 && self.local_rebuild { - // Assume the local-rebuild rustc already has stage1 features. - stage = 1; - } else { - stage = compiler.stage; - } - - // Customize the compiler we're running. Specify the compiler to cargo - // as our shim and then pass it some various options used to configure - // how the actual compiler itself is called. - // - // These variables are primarily all read by - // src/bootstrap/bin/{rustc.rs,rustdoc.rs} - cargo.env("RUSTBUILD_NATIVE_DIR", self.native_dir(target)) - .env("RUSTC", self.out.join("bootstrap/debug/rustc")) - .env("RUSTC_REAL", self.compiler_path(compiler)) - .env("RUSTC_STAGE", stage.to_string()) - .env("RUSTC_CODEGEN_UNITS", - self.config.rust_codegen_units.to_string()) - .env("RUSTC_DEBUG_ASSERTIONS", - self.config.rust_debug_assertions.to_string()) - .env("RUSTC_SYSROOT", self.sysroot(compiler)) - .env("RUSTC_LIBDIR", self.rustc_libdir(compiler)) - .env("RUSTC_RPATH", self.config.rust_rpath.to_string()) - .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc")) - .env("RUSTDOC_REAL", self.rustdoc(compiler)) - .env("RUSTC_FLAGS", self.rustc_flags(target).join(" ")); - - if mode != Mode::Tool { - // Tools don't get debuginfo right now, e.g. cargo and rls don't - // get compiled with debuginfo. - cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string()) - .env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string()) - .env("RUSTC_FORCE_UNSTABLE", "1"); - - // Currently the compiler depends on crates from crates.io, and - // then other crates can depend on the compiler (e.g. proc-macro - // crates). Let's say, for example that rustc itself depends on the - // bitflags crate. If an external crate then depends on the - // bitflags crate as well, we need to make sure they don't - // conflict, even if they pick the same verison of bitflags. We'll - // want to make sure that e.g. a plugin and rustc each get their - // own copy of bitflags. - - // Cargo ensures that this works in general through the -C metadata - // flag. This flag will frob the symbols in the binary to make sure - // they're different, even though the source code is the exact - // same. To solve this problem for the compiler we extend Cargo's - // already-passed -C metadata flag with our own. Our rustc.rs - // wrapper around the actual rustc will detect -C metadata being - // passed and frob it with this extra string we're passing in. - cargo.env("RUSTC_METADATA_SUFFIX", "rustc"); - } - - // Enable usage of unstable features - cargo.env("RUSTC_BOOTSTRAP", "1"); - self.add_rust_test_threads(&mut cargo); - - // Almost all of the crates that we compile as part of the bootstrap may - // have a build script, including the standard library. To compile a - // build script, however, it itself needs a standard library! This - // introduces a bit of a pickle when we're compiling the standard - // library itself. 
- // - // To work around this we actually end up using the snapshot compiler - // (stage0) for compiling build scripts of the standard library itself. - // The stage0 compiler is guaranteed to have a libstd available for use. - // - // For other crates, however, we know that we've already got a standard - // library up and running, so we can use the normal compiler to compile - // build scripts in that situation. - if mode == Mode::Libstd { - cargo.env("RUSTC_SNAPSHOT", &self.initial_rustc) - .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir()); - } else { - cargo.env("RUSTC_SNAPSHOT", self.compiler_path(compiler)) - .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_libdir(compiler)); - } - - // Ignore incremental modes except for stage0, since we're - // not guaranteeing correctness across builds if the compiler - // is changing under your feet.` - if self.flags.incremental && compiler.stage == 0 { - let incr_dir = self.incremental_dir(compiler); - cargo.env("RUSTC_INCREMENTAL", incr_dir); - } - - if let Some(ref on_fail) = self.flags.on_fail { - cargo.env("RUSTC_ON_FAIL", on_fail); - } - - cargo.env("RUSTC_VERBOSE", format!("{}", self.verbosity)); - - // Specify some various options for build scripts used throughout - // the build. - // - // FIXME: the guard against msvc shouldn't need to be here - if !target.contains("msvc") { - cargo.env(format!("CC_{}", target), self.cc(target)) - .env(format!("AR_{}", target), self.ar(target).unwrap()) // only msvc is None - .env(format!("CFLAGS_{}", target), self.cflags(target).join(" ")); - - if let Ok(cxx) = self.cxx(target) { - cargo.env(format!("CXX_{}", target), cxx); - } - } - - if mode == Mode::Libstd && - self.config.extended && - compiler.is_final_stage(self) { - cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string()); - } - - // When being built Cargo will at some point call `nmake.exe` on Windows - // MSVC. Unfortunately `nmake` will read these two environment variables - // below and try to intepret them. We're likely being run, however, from - // MSYS `make` which uses the same variables. - // - // As a result, to prevent confusion and errors, we remove these - // variables from our environment to prevent passing MSYS make flags to - // nmake, causing it to blow up. - if cfg!(target_env = "msvc") { - cargo.env_remove("MAKE"); - cargo.env_remove("MAKEFLAGS"); - } - - // Environment variables *required* throughout the build - // - // FIXME: should update code to not require this env var - cargo.env("CFG_COMPILER_HOST_TRIPLE", target); - - if self.is_verbose() { - cargo.arg("-v"); - } - // FIXME: cargo bench does not accept `--release` - if self.config.rust_optimize && cmd != "bench" { - cargo.arg("--release"); - } - if self.config.locked_deps { - cargo.arg("--locked"); - } - if self.config.vendor || self.is_sudo { - cargo.arg("--frozen"); - } - - self.ci_env.force_coloring_in_ci(&mut cargo); - - cargo - } - - /// Get a path to the compiler specified. 
- fn compiler_path(&self, compiler: &Compiler) -> PathBuf { - if compiler.is_snapshot(self) { - self.initial_rustc.clone() - } else { - self.sysroot(compiler).join("bin").join(exe("rustc", compiler.host)) - } - } - - /// Get the specified tool built by the specified compiler - fn tool(&self, compiler: &Compiler, tool: &str) -> PathBuf { - self.cargo_out(compiler, Mode::Tool, compiler.host) - .join(exe(tool, compiler.host)) - } - - /// Get the `rustdoc` executable next to the specified compiler - fn rustdoc(&self, compiler: &Compiler) -> PathBuf { - let mut rustdoc = self.compiler_path(compiler); - rustdoc.pop(); - rustdoc.push(exe("rustdoc", compiler.host)); - rustdoc - } - - /// Get a `Command` which is ready to run `tool` in `stage` built for - /// `host`. - fn tool_cmd(&self, compiler: &Compiler, tool: &str) -> Command { - let mut cmd = Command::new(self.tool(&compiler, tool)); - self.prepare_tool_cmd(compiler, &mut cmd); - cmd - } - - /// Prepares the `cmd` provided to be able to run the `compiler` provided. - /// - /// Notably this munges the dynamic library lookup path to point to the - /// right location to run `compiler`. - fn prepare_tool_cmd(&self, compiler: &Compiler, cmd: &mut Command) { - let host = compiler.host; - let mut paths = vec![ - self.sysroot_libdir(compiler, compiler.host), - self.cargo_out(compiler, Mode::Tool, host).join("deps"), - ]; - - // On MSVC a tool may invoke a C compiler (e.g. compiletest in run-make - // mode) and that C compiler may need some extra PATH modification. Do - // so here. - if compiler.host.contains("msvc") { - let curpaths = env::var_os("PATH").unwrap_or(OsString::new()); - let curpaths = env::split_paths(&curpaths).collect::>(); - for &(ref k, ref v) in self.cc[compiler.host].0.env() { - if k != "PATH" { - continue - } - for path in env::split_paths(v) { - if !curpaths.contains(&path) { - paths.push(path); - } - } - } - } - add_lib_path(paths, cmd); - } - /// Get the space-separated set of activated features for the standard /// library. fn std_features(&self) -> String { @@ -613,6 +409,9 @@ impl Build { if self.config.use_jemalloc { features.push_str(" jemalloc"); } + if self.config.llvm_enabled { + features.push_str(" llvm"); + } features } @@ -622,94 +421,67 @@ impl Build { if self.config.rust_optimize {"release"} else {"debug"} } - /// Returns the sysroot for the `compiler` specified that *this build system - /// generates*. - /// - /// That is, the sysroot for the stage0 compiler is not what the compiler - /// thinks it is by default, but it's the same as the default for stages - /// 1-3. - fn sysroot(&self, compiler: &Compiler) -> PathBuf { - if compiler.stage == 0 { - self.out.join(compiler.host).join("stage0-sysroot") - } else { - self.out.join(compiler.host).join(format!("stage{}", compiler.stage)) - } - } - /// Get the directory for incremental by-products when using the /// given compiler. - fn incremental_dir(&self, compiler: &Compiler) -> PathBuf { - self.out.join(compiler.host).join(format!("stage{}-incremental", compiler.stage)) - } - - /// Returns the libdir where the standard library and other artifacts are - /// found for a compiler's sysroot. 
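// `prepare_tool_cmd`, removed above, widens a PATH-style variable before
// running a tool: split the existing value, append any directories that are
// not already present, and hand the result back to the environment. The same
// split/append/join pattern in isolation (the two extra directories are
// placeholders, not paths the build system actually uses):
use std::env;
use std::path::PathBuf;

fn main() {
    let current = env::var_os("PATH").unwrap_or_default();
    let mut paths: Vec<PathBuf> = env::split_paths(&current).collect();
    for extra in &["/tmp/stage1-sysroot/lib", "/tmp/tools/deps"] {
        let extra = PathBuf::from(extra);
        if !paths.contains(&extra) {
            paths.push(extra);
        }
    }
    let joined = env::join_paths(paths).expect("a path contained an invalid character");
    println!("{}", joined.to_string_lossy());
}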
- fn sysroot_libdir(&self, compiler: &Compiler, target: &str) -> PathBuf { - if compiler.stage >= 2 { - if let Some(ref libdir_relative) = self.config.libdir_relative { - return self.sysroot(compiler).join(libdir_relative) - .join("rustlib").join(target).join("lib") - } - } - self.sysroot(compiler).join("lib").join("rustlib") - .join(target).join("lib") + fn incremental_dir(&self, compiler: Compiler) -> PathBuf { + self.out.join(&*compiler.host).join(format!("stage{}-incremental", compiler.stage)) } /// Returns the root directory for all output generated in a particular /// stage when running with a particular host compiler. /// /// The mode indicates what the root directory is for. - fn stage_out(&self, compiler: &Compiler, mode: Mode) -> PathBuf { + fn stage_out(&self, compiler: Compiler, mode: Mode) -> PathBuf { let suffix = match mode { Mode::Libstd => "-std", Mode::Libtest => "-test", Mode::Tool => "-tools", Mode::Librustc => "-rustc", }; - self.out.join(compiler.host) + self.out.join(&*compiler.host) .join(format!("stage{}{}", compiler.stage, suffix)) } /// Returns the root output directory for all Cargo output in a given stage, - /// running a particular compiler, wehther or not we're building the + /// running a particular compiler, whether or not we're building the /// standard library, and targeting the specified architecture. fn cargo_out(&self, - compiler: &Compiler, + compiler: Compiler, mode: Mode, - target: &str) -> PathBuf { - self.stage_out(compiler, mode).join(target).join(self.cargo_dir()) + target: Interned) -> PathBuf { + self.stage_out(compiler, mode).join(&*target).join(self.cargo_dir()) } /// Root output directory for LLVM compiled for `target` /// /// Note that if LLVM is configured externally then the directory returned /// will likely be empty. - fn llvm_out(&self, target: &str) -> PathBuf { - self.out.join(target).join("llvm") + fn llvm_out(&self, target: Interned) -> PathBuf { + self.out.join(&*target).join("llvm") } /// Output directory for all documentation for a target - fn doc_out(&self, target: &str) -> PathBuf { - self.out.join(target).join("doc") + fn doc_out(&self, target: Interned) -> PathBuf { + self.out.join(&*target).join("doc") } /// Output directory for some generated md crate documentation for a target (temporary) - fn md_doc_out(&self, target: &str) -> PathBuf { - self.out.join(target).join("md-doc") + fn md_doc_out(&self, target: Interned) -> Interned { + INTERNER.intern_path(self.out.join(&*target).join("md-doc")) } /// Output directory for all crate documentation for a target (temporary) /// /// The artifacts here are then copied into `doc_out` above. - fn crate_doc_out(&self, target: &str) -> PathBuf { - self.out.join(target).join("crate-docs") + fn crate_doc_out(&self, target: Interned) -> PathBuf { + self.out.join(&*target).join("crate-docs") } /// Returns true if no custom `llvm-config` is set for the specified target. /// /// If no custom `llvm-config` was specified then Rust's llvm will be used. - fn is_rust_llvm(&self, target: &str) -> bool { - match self.config.target_config.get(target) { + fn is_rust_llvm(&self, target: Interned) -> bool { + match self.config.target_config.get(&target) { Some(ref c) => c.llvm_config.is_none(), None => true } @@ -719,25 +491,25 @@ impl Build { /// /// If a custom `llvm-config` was specified for target then that's returned /// instead. 
- fn llvm_config(&self, target: &str) -> PathBuf { - let target_config = self.config.target_config.get(target); + fn llvm_config(&self, target: Interned) -> PathBuf { + let target_config = self.config.target_config.get(&target); if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) { s.clone() } else { - self.llvm_out(&self.config.build).join("bin") - .join(exe("llvm-config", target)) + self.llvm_out(self.config.build).join("bin") + .join(exe("llvm-config", &*target)) } } /// Returns the path to `FileCheck` binary for the specified target - fn llvm_filecheck(&self, target: &str) -> PathBuf { - let target_config = self.config.target_config.get(target); + fn llvm_filecheck(&self, target: Interned) -> PathBuf { + let target_config = self.config.target_config.get(&target); if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) { let llvm_bindir = output(Command::new(s).arg("--bindir")); - Path::new(llvm_bindir.trim()).join(exe("FileCheck", target)) + Path::new(llvm_bindir.trim()).join(exe("FileCheck", &*target)) } else { - let base = self.llvm_out(&self.config.build).join("build"); - let exe = exe("FileCheck", target); + let base = self.llvm_out(self.config.build).join("build"); + let exe = exe("FileCheck", &*target); if !self.config.ninja && self.config.build.contains("msvc") { base.join("Release/bin").join(exe) } else { @@ -747,29 +519,16 @@ impl Build { } /// Directory for libraries built from C/C++ code and shared between stages. - fn native_dir(&self, target: &str) -> PathBuf { - self.out.join(target).join("native") + fn native_dir(&self, target: Interned) -> PathBuf { + self.out.join(&*target).join("native") } /// Root output directory for rust_test_helpers library compiled for /// `target` - fn test_helpers_out(&self, target: &str) -> PathBuf { + fn test_helpers_out(&self, target: Interned) -> PathBuf { self.native_dir(target).join("rust-test-helpers") } - /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic - /// library lookup path. - fn add_rustc_lib_path(&self, compiler: &Compiler, cmd: &mut Command) { - // Windows doesn't need dylib path munging because the dlls for the - // compiler live next to the compiler and the system will find them - // automatically. - if cfg!(windows) { - return - } - - add_lib_path(vec![self.rustc_libdir(compiler)], cmd); - } - /// Adds the `RUST_TEST_THREADS` env var if necessary fn add_rust_test_threads(&self, cmd: &mut Command) { if env::var_os("RUST_TEST_THREADS").is_none() { @@ -777,19 +536,6 @@ impl Build { } } - /// Returns the compiler's libdir where it stores the dynamic libraries that - /// it itself links against. - /// - /// For example this returns `/lib` on Unix and `/bin` on - /// Windows. - fn rustc_libdir(&self, compiler: &Compiler) -> PathBuf { - if compiler.is_snapshot(self) { - self.rustc_snapshot_libdir() - } else { - self.sysroot(compiler).join(libdir(compiler.host)) - } - } - /// Returns the libdir of the snapshot compiler. fn rustc_snapshot_libdir(&self) -> PathBuf { self.initial_rustc.parent().unwrap().parent().unwrap() @@ -842,20 +588,20 @@ impl Build { /// Returns the number of parallel jobs that have been configured for this /// build. fn jobs(&self) -> u32 { - self.flags.jobs.unwrap_or_else(|| num_cpus::get() as u32) + self.config.jobs.unwrap_or_else(|| num_cpus::get() as u32) } /// Returns the path to the C compiler for the target specified. 
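// `llvm_config()` and `llvm_filecheck()` above either honor a user-supplied
// llvm-config path or fall back to the LLVM that rustbuild built itself, then
// ask that binary where things live. The query is an ordinary captured command
// invocation; a standalone sketch of the same call, assuming some
// `llvm-config` is reachable on PATH (swap in the configured path otherwise):
use std::process::Command;

fn llvm_bindir(llvm_config: &str) -> Option<String> {
    let out = match Command::new(llvm_config).arg("--bindir").output() {
        Ok(out) => out,
        Err(_) => return None, // binary not found or not executable
    };
    if !out.status.success() {
        return None;
    }
    Some(String::from_utf8_lossy(&out.stdout).trim().to_string())
}

fn main() {
    match llvm_bindir("llvm-config") {
        Some(dir) => println!("FileCheck should be under {}", dir),
        None => println!("no usable llvm-config found"),
    }
}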
- fn cc(&self, target: &str) -> &Path { - self.cc[target].0.path() + fn cc(&self, target: Interned) -> &Path { + self.cc[&target].0.path() } /// Returns a list of flags to pass to the C compiler for the target /// specified. - fn cflags(&self, target: &str) -> Vec { + fn cflags(&self, target: Interned) -> Vec { // Filter out -O and /O (the optimization flags) that we picked up from // gcc-rs because the build scripts will determine that for themselves. - let mut base = self.cc[target].0.args().iter() + let mut base = self.cc[&target].0.args().iter() .map(|s| s.to_string_lossy().into_owned()) .filter(|s| !s.starts_with("-O") && !s.starts_with("/O")) .collect::>(); @@ -871,20 +617,20 @@ impl Build { // Work around an apparently bad MinGW / GCC optimization, // See: http://lists.llvm.org/pipermail/cfe-dev/2016-December/051980.html // See: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=78936 - if target == "i686-pc-windows-gnu" { + if &*target == "i686-pc-windows-gnu" { base.push("-fno-omit-frame-pointer".into()); } base } /// Returns the path to the `ar` archive utility for the target specified. - fn ar(&self, target: &str) -> Option<&Path> { - self.cc[target].1.as_ref().map(|p| &**p) + fn ar(&self, target: Interned) -> Option<&Path> { + self.cc[&target].1.as_ref().map(|p| &**p) } /// Returns the path to the C++ compiler for the target specified. - fn cxx(&self, target: &str) -> Result<&Path, String> { - match self.cxx.get(target) { + fn cxx(&self, target: Interned) -> Result<&Path, String> { + match self.cxx.get(&target) { Some(p) => Ok(p.path()), None => Err(format!( "target `{}` is not configured as a host, only as a target", @@ -893,7 +639,7 @@ impl Build { } /// Returns flags to pass to the compiler to generate code for `target`. - fn rustc_flags(&self, target: &str) -> Vec { + fn rustc_flags(&self, target: Interned) -> Vec { // New flags should be added here with great caution! // // It's quite unfortunate to **require** flags to generate code for a @@ -909,9 +655,19 @@ impl Build { base } + /// Returns if this target should statically link the C runtime, if specified + fn crt_static(&self, target: Interned) -> Option { + if target.contains("pc-windows-msvc") { + Some(true) + } else { + self.config.target_config.get(&target) + .and_then(|t| t.crt_static) + } + } + /// Returns the "musl root" for this `target`, if defined - fn musl_root(&self, target: &str) -> Option<&Path> { - self.config.target_config.get(target) + fn musl_root(&self, target: Interned) -> Option<&Path> { + self.config.target_config.get(&target) .and_then(|t| t.musl_root.as_ref()) .or(self.config.musl_root.as_ref()) .map(|p| &**p) @@ -919,8 +675,9 @@ impl Build { /// Returns whether the target will be tested using the `remote-test-client` /// and `remote-test-server` binaries. - fn remote_tested(&self, target: &str) -> bool { - self.qemu_rootfs(target).is_some() || target.contains("android") + fn remote_tested(&self, target: Interned) -> bool { + self.qemu_rootfs(target).is_some() || target.contains("android") || + env::var_os("TEST_DEVICE_ADDR").is_some() } /// Returns the root of the "rootfs" image that this target will be using, @@ -928,8 +685,8 @@ impl Build { /// /// If `Some` is returned then that means that tests for this target are /// emulated with QEMU and binaries will need to be shipped to the emulator. 
- fn qemu_rootfs(&self, target: &str) -> Option<&Path> { - self.config.target_config.get(target) + fn qemu_rootfs(&self, target: Interned) -> Option<&Path> { + self.config.target_config.get(&target) .and_then(|t| t.qemu_rootfs.as_ref()) .map(|p| &**p) } @@ -957,20 +714,20 @@ impl Build { /// /// When all of these conditions are met the build will lift artifacts from /// the previous stage forward. - fn force_use_stage1(&self, compiler: &Compiler, target: &str) -> bool { + fn force_use_stage1(&self, compiler: Compiler, target: Interned) -> bool { !self.config.full_bootstrap && compiler.stage >= 2 && - self.config.host.iter().any(|h| h == target) + self.hosts.iter().any(|h| *h == target) } /// Returns the directory that OpenSSL artifacts are compiled into if /// configured to do so. - fn openssl_dir(&self, target: &str) -> Option { + fn openssl_dir(&self, target: Interned) -> Option { // OpenSSL not used on Windows if target.contains("windows") { None } else if self.config.openssl_static { - Some(self.out.join(target).join("openssl")) + Some(self.out.join(&*target).join("openssl")) } else { None } @@ -978,7 +735,7 @@ impl Build { /// Returns the directory that OpenSSL artifacts are installed into if /// configured as such. - fn openssl_install_dir(&self, target: &str) -> Option { + fn openssl_install_dir(&self, target: Interned) -> Option { self.openssl_dir(target).map(|p| p.join("install")) } @@ -1077,16 +834,38 @@ impl Build { None } } + + /// Get a list of crates from a root crate. + /// + /// Returns Vec<(crate, path to crate, is_root_crate)> + fn crates(&self, root: &str) -> Vec<(Interned, &Path)> { + let interned = INTERNER.intern_string(root.to_owned()); + let mut ret = Vec::new(); + let mut list = vec![interned]; + let mut visited = HashSet::new(); + while let Some(krate) = list.pop() { + let krate = &self.crates[&krate]; + // If we can't strip prefix, then out-of-tree path + let path = krate.path.strip_prefix(&self.src).unwrap_or(&krate.path); + ret.push((krate.name, path)); + for dep in &krate.deps { + if visited.insert(dep) && dep != "build_helper" { + list.push(*dep); + } + } + } + ret + } } -impl<'a> Compiler<'a> { - /// Creates a new complier for the specified stage/host - fn new(stage: u32, host: &'a str) -> Compiler<'a> { - Compiler { stage: stage, host: host } +impl<'a> Compiler { + pub fn with_stage(mut self, stage: u32) -> Compiler { + self.stage = stage; + self } /// Returns whether this is a snapshot compiler for `build`'s configuration - fn is_snapshot(&self, build: &Build) -> bool { + pub fn is_snapshot(&self, build: &Build) -> bool { self.stage == 0 && self.host == build.build } @@ -1094,7 +873,7 @@ impl<'a> Compiler<'a> { /// current build session. /// This takes into account whether we're performing a full bootstrap or /// not; don't directly compare the stage with `2`! 
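// The new `Build::crates(root)` above walks the local crate graph with a work
// list plus a visited set so each dependency is returned exactly once. The
// same traversal over a toy dependency table (the crate names below are only
// illustrative):
use std::collections::{HashMap, HashSet};

fn reachable<'a>(deps: &HashMap<&'a str, Vec<&'a str>>, root: &'a str) -> Vec<&'a str> {
    let mut ret = Vec::new();
    let mut list = vec![root];
    let mut visited = HashSet::new();
    visited.insert(root);
    while let Some(krate) = list.pop() {
        ret.push(krate);
        if let Some(krate_deps) = deps.get(krate) {
            for dep in krate_deps {
                if visited.insert(*dep) {
                    list.push(*dep);
                }
            }
        }
    }
    ret
}

fn main() {
    let mut deps = HashMap::new();
    deps.insert("std", vec!["core", "alloc"]);
    deps.insert("alloc", vec!["core"]);
    deps.insert("core", vec![]);
    println!("{:?}", reachable(&deps, "std")); // every crate appears once
}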
- fn is_final_stage(&self, build: &Build) -> bool { + pub fn is_final_stage(&self, build: &Build) -> bool { let final_stage = if build.config.full_bootstrap { 2 } else { 1 }; self.stage >= final_stage } diff --git a/src/bootstrap/metadata.rs b/src/bootstrap/metadata.rs index 9326bb7129..5f1df1d26e 100644 --- a/src/bootstrap/metadata.rs +++ b/src/bootstrap/metadata.rs @@ -13,17 +13,18 @@ use std::process::Command; use std::path::PathBuf; use build_helper::output; -use rustc_serialize::json; +use serde_json; use {Build, Crate}; +use cache::INTERNER; -#[derive(RustcDecodable)] +#[derive(Deserialize)] struct Output { packages: Vec, resolve: Resolve, } -#[derive(RustcDecodable)] +#[derive(Deserialize)] struct Package { id: String, name: String, @@ -32,12 +33,12 @@ struct Package { manifest_path: String, } -#[derive(RustcDecodable)] +#[derive(Deserialize)] struct Resolve { nodes: Vec, } -#[derive(RustcDecodable)] +#[derive(Deserialize)] struct ResolveNode { id: String, dependencies: Vec, @@ -61,22 +62,23 @@ fn build_krate(build: &mut Build, krate: &str) { .arg("--format-version").arg("1") .arg("--manifest-path").arg(build.src.join(krate).join("Cargo.toml")); let output = output(&mut cargo); - let output: Output = json::decode(&output).unwrap(); + let output: Output = serde_json::from_str(&output).unwrap(); let mut id2name = HashMap::new(); for package in output.packages { if package.source.is_none() { - id2name.insert(package.id, package.name.clone()); + let name = INTERNER.intern_string(package.name); + id2name.insert(package.id, name); let mut path = PathBuf::from(package.manifest_path); path.pop(); - build.crates.insert(package.name.clone(), Crate { - build_step: format!("build-crate-{}", package.name), - doc_step: format!("doc-crate-{}", package.name), - test_step: format!("test-crate-{}", package.name), - bench_step: format!("bench-crate-{}", package.name), - name: package.name, + build.crates.insert(name, Crate { + build_step: format!("build-crate-{}", name), + doc_step: format!("doc-crate-{}", name), + test_step: format!("test-crate-{}", name), + bench_step: format!("bench-crate-{}", name), + name, version: package.version, deps: Vec::new(), - path: path, + path, }); } } @@ -93,7 +95,7 @@ fn build_krate(build: &mut Build, krate: &str) { Some(dep) => dep, None => continue, }; - krate.deps.push(dep.clone()); + krate.deps.push(*dep); } } } diff --git a/src/bootstrap/mk/Makefile.in b/src/bootstrap/mk/Makefile.in index d329630269..9410927824 100644 --- a/src/bootstrap/mk/Makefile.in +++ b/src/bootstrap/mk/Makefile.in @@ -56,6 +56,7 @@ check-aux: $(Q)$(BOOTSTRAP) test \ src/tools/cargotest \ src/tools/cargo \ + src/tools/rls \ src/test/pretty \ src/test/run-pass/pretty \ src/test/run-fail/pretty \ @@ -63,6 +64,8 @@ check-aux: src/test/run-pass-fulldeps/pretty \ src/test/run-fail-fulldeps/pretty \ $(BOOTSTRAP_ARGS) +check-bootstrap: + $(Q)$(CFG_PYTHON) $(CFG_SRC_DIR)src/bootstrap/bootstrap_test.py dist: $(Q)$(BOOTSTRAP) dist $(BOOTSTRAP_ARGS) distcheck: diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs index 20eec97d8e..8429b64513 100644 --- a/src/bootstrap/native.rs +++ b/src/bootstrap/native.rs @@ -11,7 +11,7 @@ //! Compilation of native dependencies like LLVM. //! //! Native projects like LLVM unfortunately aren't suited just yet for -//! compilation in build scripts that Cargo has. This is because thie +//! compilation in build scripts that Cargo has. This is because the //! compilation takes a *very* long time but also because we don't want to //! 
compile LLVM 3 times as part of a normal bootstrap (we want it cached). //! @@ -32,174 +32,217 @@ use gcc; use Build; use util; use build_helper::up_to_date; +use builder::{Builder, RunConfig, ShouldRun, Step}; +use cache::Interned; -/// Compile LLVM for `target`. -pub fn llvm(build: &Build, target: &str) { - // If we're using a custom LLVM bail out here, but we can only use a - // custom LLVM for the build triple. - if let Some(config) = build.config.target_config.get(target) { - if let Some(ref s) = config.llvm_config { - return check_llvm_version(build, s); - } +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Llvm { + pub target: Interned, +} + +impl Step for Llvm { + type Output = (); + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/llvm") } - let rebuild_trigger = build.src.join("src/rustllvm/llvm-rebuild-trigger"); - let mut rebuild_trigger_contents = String::new(); - t!(t!(File::open(&rebuild_trigger)).read_to_string(&mut rebuild_trigger_contents)); + fn make_run(run: RunConfig) { + run.builder.ensure(Llvm { target: run.target }) + } - let out_dir = build.llvm_out(target); - let done_stamp = out_dir.join("llvm-finished-building"); - if done_stamp.exists() { - let mut done_contents = String::new(); - t!(t!(File::open(&done_stamp)).read_to_string(&mut done_contents)); + /// Compile LLVM for `target`. + fn run(self, builder: &Builder) { + let build = builder.build; + let target = self.target; - // If LLVM was already built previously and contents of the rebuild-trigger file - // didn't change from the previous build, then no action is required. - if done_contents == rebuild_trigger_contents { - return + // If we're not compiling for LLVM bail out here. + if !build.config.llvm_enabled { + return; + } + + // If we're using a custom LLVM bail out here, but we can only use a + // custom LLVM for the build triple. + if let Some(config) = build.config.target_config.get(&target) { + if let Some(ref s) = config.llvm_config { + return check_llvm_version(build, s); + } } - } - if build.config.llvm_clean_rebuild { - drop(fs::remove_dir_all(&out_dir)); - } - let _folder = build.fold_output(|| "llvm"); - println!("Building LLVM for {}", target); - let _time = util::timeit(); - t!(fs::create_dir_all(&out_dir)); + let rebuild_trigger = build.src.join("src/rustllvm/llvm-rebuild-trigger"); + let mut rebuild_trigger_contents = String::new(); + t!(t!(File::open(&rebuild_trigger)).read_to_string(&mut rebuild_trigger_contents)); - // http://llvm.org/docs/CMake.html - let mut cfg = cmake::Config::new(build.src.join("src/llvm")); - if build.config.ninja { - cfg.generator("Ninja"); - } + let out_dir = build.llvm_out(target); + let done_stamp = out_dir.join("llvm-finished-building"); + if done_stamp.exists() { + let mut done_contents = String::new(); + t!(t!(File::open(&done_stamp)).read_to_string(&mut done_contents)); - let profile = match (build.config.llvm_optimize, build.config.llvm_release_debuginfo) { - (false, _) => "Debug", - (true, false) => "Release", - (true, true) => "RelWithDebInfo", - }; - - // NOTE: remember to also update `config.toml.example` when changing the defaults! 
- let llvm_targets = match build.config.llvm_targets { - Some(ref s) => s, - None => "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX;Hexagon", - }; - - let llvm_exp_targets = match build.config.llvm_experimental_targets { - Some(ref s) => s, - None => "", - }; - - let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"}; - - cfg.target(target) - .host(&build.build) - .out_dir(&out_dir) - .profile(profile) - .define("LLVM_ENABLE_ASSERTIONS", assertions) - .define("LLVM_TARGETS_TO_BUILD", llvm_targets) - .define("LLVM_EXPERIMENTAL_TARGETS_TO_BUILD", llvm_exp_targets) - .define("LLVM_INCLUDE_EXAMPLES", "OFF") - .define("LLVM_INCLUDE_TESTS", "OFF") - .define("LLVM_INCLUDE_DOCS", "OFF") - .define("LLVM_ENABLE_ZLIB", "OFF") - .define("WITH_POLLY", "OFF") - .define("LLVM_ENABLE_TERMINFO", "OFF") - .define("LLVM_ENABLE_LIBEDIT", "OFF") - .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string()) - .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap()) - .define("LLVM_DEFAULT_TARGET_TRIPLE", target); - - if target.contains("msvc") { - cfg.define("LLVM_USE_CRT_DEBUG", "MT"); - cfg.define("LLVM_USE_CRT_RELEASE", "MT"); - cfg.define("LLVM_USE_CRT_RELWITHDEBINFO", "MT"); - cfg.static_crt(true); - } + // If LLVM was already built previously and contents of the rebuild-trigger file + // didn't change from the previous build, then no action is required. + if done_contents == rebuild_trigger_contents { + return + } + } - if target.starts_with("i686") { - cfg.define("LLVM_BUILD_32_BITS", "ON"); - } + let _folder = build.fold_output(|| "llvm"); + println!("Building LLVM for {}", target); + let _time = util::timeit(); + t!(fs::create_dir_all(&out_dir)); - if let Some(num_linkers) = build.config.llvm_link_jobs { - if num_linkers > 0 { - cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string()); + // http://llvm.org/docs/CMake.html + let mut cfg = cmake::Config::new(build.src.join("src/llvm")); + if build.config.ninja { + cfg.generator("Ninja"); } - } - // http://llvm.org/docs/HowToCrossCompileLLVM.html - if target != build.build { - // FIXME: if the llvm root for the build triple is overridden then we - // should use llvm-tblgen from there, also should verify that it - // actually exists most of the time in normal installs of LLVM. - let host = build.llvm_out(&build.build).join("bin/llvm-tblgen"); - cfg.define("CMAKE_CROSSCOMPILING", "True") - .define("LLVM_TABLEGEN", &host); - } + let profile = match (build.config.llvm_optimize, build.config.llvm_release_debuginfo) { + (false, _) => "Debug", + (true, false) => "Release", + (true, true) => "RelWithDebInfo", + }; + + // NOTE: remember to also update `config.toml.example` when changing the defaults! 
+ let llvm_targets = match build.config.llvm_targets { + Some(ref s) => s, + None => "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX;Hexagon", + }; + + let llvm_exp_targets = match build.config.llvm_experimental_targets { + Some(ref s) => s, + None => "", + }; + + let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"}; + + cfg.target(&target) + .host(&build.build) + .out_dir(&out_dir) + .profile(profile) + .define("LLVM_ENABLE_ASSERTIONS", assertions) + .define("LLVM_TARGETS_TO_BUILD", llvm_targets) + .define("LLVM_EXPERIMENTAL_TARGETS_TO_BUILD", llvm_exp_targets) + .define("LLVM_INCLUDE_EXAMPLES", "OFF") + .define("LLVM_INCLUDE_TESTS", "OFF") + .define("LLVM_INCLUDE_DOCS", "OFF") + .define("LLVM_ENABLE_ZLIB", "OFF") + .define("WITH_POLLY", "OFF") + .define("LLVM_ENABLE_TERMINFO", "OFF") + .define("LLVM_ENABLE_LIBEDIT", "OFF") + .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string()) + .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap()) + .define("LLVM_DEFAULT_TARGET_TRIPLE", target); + + + // This setting makes the LLVM tools link to the dynamic LLVM library, + // which saves both memory during parallel links and overall disk space + // for the tools. We don't distribute any of those tools, so this is + // just a local concern. However, it doesn't work well everywhere. + if target.contains("linux-gnu") || target.contains("apple-darwin") { + cfg.define("LLVM_LINK_LLVM_DYLIB", "ON"); + } - let sanitize_cc = |cc: &Path| { if target.contains("msvc") { - OsString::from(cc.to_str().unwrap().replace("\\", "/")) - } else { - cc.as_os_str().to_owned() + cfg.define("LLVM_USE_CRT_DEBUG", "MT"); + cfg.define("LLVM_USE_CRT_RELEASE", "MT"); + cfg.define("LLVM_USE_CRT_RELWITHDEBINFO", "MT"); + cfg.static_crt(true); } - }; - let configure_compilers = |cfg: &mut cmake::Config| { - // MSVC with CMake uses msbuild by default which doesn't respect these - // vars that we'd otherwise configure. In that case we just skip this - // entirely. - if target.contains("msvc") && !build.config.ninja { - return + if target.starts_with("i686") { + cfg.define("LLVM_BUILD_32_BITS", "ON"); } - let cc = build.cc(target); - let cxx = build.cxx(target).unwrap(); - - // Handle msvc + ninja + ccache specially (this is what the bots use) - if target.contains("msvc") && - build.config.ninja && - build.config.ccache.is_some() { - let mut cc = env::current_exe().expect("failed to get cwd"); - cc.set_file_name("sccache-plus-cl.exe"); - - cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc)) - .define("CMAKE_CXX_COMPILER", sanitize_cc(&cc)); - cfg.env("SCCACHE_PATH", - build.config.ccache.as_ref().unwrap()) - .env("SCCACHE_TARGET", target); - - // If ccache is configured we inform the build a little differently hwo - // to invoke ccache while also invoking our compilers. 
- } else if let Some(ref ccache) = build.config.ccache { - cfg.define("CMAKE_C_COMPILER", ccache) - .define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc)) - .define("CMAKE_CXX_COMPILER", ccache) - .define("CMAKE_CXX_COMPILER_ARG1", sanitize_cc(cxx)); - } else { - cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc)) - .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx)); + if let Some(num_linkers) = build.config.llvm_link_jobs { + if num_linkers > 0 { + cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string()); + } } - cfg.build_arg("-j").build_arg(build.jobs().to_string()); - cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" ")); - cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" ")); - }; + // http://llvm.org/docs/HowToCrossCompileLLVM.html + if target != build.build { + builder.ensure(Llvm { target: build.build }); + // FIXME: if the llvm root for the build triple is overridden then we + // should use llvm-tblgen from there, also should verify that it + // actually exists most of the time in normal installs of LLVM. + let host = build.llvm_out(build.build).join("bin/llvm-tblgen"); + cfg.define("CMAKE_CROSSCOMPILING", "True") + .define("LLVM_TABLEGEN", &host); + + if target.contains("netbsd") { + cfg.define("CMAKE_SYSTEM_NAME", "NetBSD"); + } else if target.contains("freebsd") { + cfg.define("CMAKE_SYSTEM_NAME", "FreeBSD"); + } - configure_compilers(&mut cfg); + cfg.define("LLVM_NATIVE_BUILD", build.llvm_out(build.build).join("build")); + } - if env::var_os("SCCACHE_ERROR_LOG").is_some() { - cfg.env("RUST_LOG", "sccache=warn"); - } + let sanitize_cc = |cc: &Path| { + if target.contains("msvc") { + OsString::from(cc.to_str().unwrap().replace("\\", "/")) + } else { + cc.as_os_str().to_owned() + } + }; - // FIXME: we don't actually need to build all LLVM tools and all LLVM - // libraries here, e.g. we just want a few components and a few - // tools. Figure out how to filter them down and only build the right - // tools and libs on all platforms. - cfg.build(); + let configure_compilers = |cfg: &mut cmake::Config| { + // MSVC with CMake uses msbuild by default which doesn't respect these + // vars that we'd otherwise configure. In that case we just skip this + // entirely. + if target.contains("msvc") && !build.config.ninja { + return + } + + let cc = build.cc(target); + let cxx = build.cxx(target).unwrap(); + + // Handle msvc + ninja + ccache specially (this is what the bots use) + if target.contains("msvc") && + build.config.ninja && + build.config.ccache.is_some() { + let mut cc = env::current_exe().expect("failed to get cwd"); + cc.set_file_name("sccache-plus-cl.exe"); + + cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc)) + .define("CMAKE_CXX_COMPILER", sanitize_cc(&cc)); + cfg.env("SCCACHE_PATH", + build.config.ccache.as_ref().unwrap()) + .env("SCCACHE_TARGET", target); + + // If ccache is configured we inform the build a little differently hwo + // to invoke ccache while also invoking our compilers. 
+ } else if let Some(ref ccache) = build.config.ccache { + cfg.define("CMAKE_C_COMPILER", ccache) + .define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc)) + .define("CMAKE_CXX_COMPILER", ccache) + .define("CMAKE_CXX_COMPILER_ARG1", sanitize_cc(cxx)); + } else { + cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc)) + .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx)); + } - t!(t!(File::create(&done_stamp)).write_all(rebuild_trigger_contents.as_bytes())); + cfg.build_arg("-j").build_arg(build.jobs().to_string()); + cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" ")); + cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" ")); + }; + + configure_compilers(&mut cfg); + + if env::var_os("SCCACHE_ERROR_LOG").is_some() { + cfg.env("RUST_LOG", "sccache=warn"); + } + + // FIXME: we don't actually need to build all LLVM tools and all LLVM + // libraries here, e.g. we just want a few components and a few + // tools. Figure out how to filter them down and only build the right + // tools and libs on all platforms. + cfg.build(); + + t!(t!(File::create(&done_stamp)).write_all(rebuild_trigger_contents.as_bytes())); + } } fn check_llvm_version(build: &Build, llvm_config: &Path) { @@ -216,161 +259,196 @@ fn check_llvm_version(build: &Build, llvm_config: &Path) { panic!("\n\nbad LLVM version: {}, need >=3.5\n\n", version) } -/// Compiles the `rust_test_helpers.c` library which we used in various -/// `run-pass` test suites for ABI testing. -pub fn test_helpers(build: &Build, target: &str) { - let dst = build.test_helpers_out(target); - let src = build.src.join("src/rt/rust_test_helpers.c"); - if up_to_date(&src, &dst.join("librust_test_helpers.a")) { - return +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct TestHelpers { + pub target: Interned, +} + +impl Step for TestHelpers { + type Output = (); + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/rt/rust_test_helpers.c") } - let _folder = build.fold_output(|| "build_test_helpers"); - println!("Building test helpers"); - t!(fs::create_dir_all(&dst)); - let mut cfg = gcc::Config::new(); - - // We may have found various cross-compilers a little differently due to our - // extra configuration, so inform gcc of these compilers. Note, though, that - // on MSVC we still need gcc's detection of env vars (ugh). - if !target.contains("msvc") { - if let Some(ar) = build.ar(target) { - cfg.archiver(ar); - } - cfg.compiler(build.cc(target)); + fn make_run(run: RunConfig) { + run.builder.ensure(TestHelpers { target: run.target }) } - cfg.cargo_metadata(false) - .out_dir(&dst) - .target(target) - .host(&build.build) - .opt_level(0) - .debug(false) - .file(build.src.join("src/rt/rust_test_helpers.c")) - .compile("librust_test_helpers.a"); + /// Compiles the `rust_test_helpers.c` library which we used in various + /// `run-pass` test suites for ABI testing. + fn run(self, builder: &Builder) { + let build = builder.build; + let target = self.target; + let dst = build.test_helpers_out(target); + let src = build.src.join("src/rt/rust_test_helpers.c"); + if up_to_date(&src, &dst.join("librust_test_helpers.a")) { + return + } + + let _folder = build.fold_output(|| "build_test_helpers"); + println!("Building test helpers"); + t!(fs::create_dir_all(&dst)); + let mut cfg = gcc::Config::new(); + + // We may have found various cross-compilers a little differently due to our + // extra configuration, so inform gcc of these compilers. Note, though, that + // on MSVC we still need gcc's detection of env vars (ugh). 
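// The `up_to_date(&src, &dst)` guard above skips rebuilding
// librust_test_helpers.a when the output is already newer than its source. The
// core of that check is an mtime comparison; a simplified stand-in (not the
// real build_helper::up_to_date, which also recurses into directories), with
// paths shaped like the ones above but used only for illustration:
use std::fs;
use std::path::Path;

fn newer_than(output: &Path, input: &Path) -> bool {
    let out_mtime = fs::metadata(output).and_then(|m| m.modified());
    let in_mtime = fs::metadata(input).and_then(|m| m.modified());
    match (out_mtime, in_mtime) {
        (Ok(out), Ok(inp)) => out >= inp,
        _ => false, // a missing output (or unreadable input) forces a rebuild
    }
}

fn main() {
    let src = Path::new("src/rt/rust_test_helpers.c");
    let dst = Path::new("build/native/rust-test-helpers/librust_test_helpers.a");
    if newer_than(dst, src) {
        println!("test helpers are up to date, skipping");
    } else {
        println!("would rebuild the test helpers");
    }
}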
+ if !target.contains("msvc") { + if let Some(ar) = build.ar(target) { + cfg.archiver(ar); + } + cfg.compiler(build.cc(target)); + } + + cfg.cargo_metadata(false) + .out_dir(&dst) + .target(&target) + .host(&build.build) + .opt_level(0) + .debug(false) + .file(build.src.join("src/rt/rust_test_helpers.c")) + .compile("librust_test_helpers.a"); + } } + const OPENSSL_VERS: &'static str = "1.0.2k"; const OPENSSL_SHA256: &'static str = "6b3977c61f2aedf0f96367dcfb5c6e578cf37e7b8d913b4ecb6643c3cb88d8c0"; -pub fn openssl(build: &Build, target: &str) { - let out = match build.openssl_dir(target) { - Some(dir) => dir, - None => return, - }; +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Openssl { + pub target: Interned, +} - let stamp = out.join(".stamp"); - let mut contents = String::new(); - drop(File::open(&stamp).and_then(|mut f| f.read_to_string(&mut contents))); - if contents == OPENSSL_VERS { - return +impl Step for Openssl { + type Output = (); + + fn should_run(run: ShouldRun) -> ShouldRun { + run.never() } - t!(fs::create_dir_all(&out)); - - let name = format!("openssl-{}.tar.gz", OPENSSL_VERS); - let tarball = out.join(&name); - if !tarball.exists() { - let tmp = tarball.with_extension("tmp"); - // originally from https://www.openssl.org/source/... - let url = format!("https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/{}", - name); - let mut ok = false; - for _ in 0..3 { - let status = Command::new("curl") - .arg("-o").arg(&tmp) - .arg(&url) - .status() - .expect("failed to spawn curl"); - if status.success() { - ok = true; - break - } + + fn run(self, builder: &Builder) { + let build = builder.build; + let target = self.target; + let out = match build.openssl_dir(target) { + Some(dir) => dir, + None => return, + }; + + let stamp = out.join(".stamp"); + let mut contents = String::new(); + drop(File::open(&stamp).and_then(|mut f| f.read_to_string(&mut contents))); + if contents == OPENSSL_VERS { + return } - if !ok { - panic!("failed to download openssl source") + t!(fs::create_dir_all(&out)); + + let name = format!("openssl-{}.tar.gz", OPENSSL_VERS); + let tarball = out.join(&name); + if !tarball.exists() { + let tmp = tarball.with_extension("tmp"); + // originally from https://www.openssl.org/source/... 
+ let url = format!("https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/{}", + name); + let mut ok = false; + for _ in 0..3 { + let status = Command::new("curl") + .arg("-o").arg(&tmp) + .arg(&url) + .status() + .expect("failed to spawn curl"); + if status.success() { + ok = true; + break + } + } + if !ok { + panic!("failed to download openssl source") + } + let mut shasum = if target.contains("apple") { + let mut cmd = Command::new("shasum"); + cmd.arg("-a").arg("256"); + cmd + } else { + Command::new("sha256sum") + }; + let output = output(&mut shasum.arg(&tmp)); + let found = output.split_whitespace().next().unwrap(); + if found != OPENSSL_SHA256 { + panic!("downloaded openssl sha256 different\n\ + expected: {}\n\ + found: {}\n", OPENSSL_SHA256, found); + } + t!(fs::rename(&tmp, &tarball)); } - let mut shasum = if target.contains("apple") { - let mut cmd = Command::new("shasum"); - cmd.arg("-a").arg("256"); - cmd - } else { - Command::new("sha256sum") + let obj = out.join(format!("openssl-{}", OPENSSL_VERS)); + let dst = build.openssl_install_dir(target).unwrap(); + drop(fs::remove_dir_all(&obj)); + drop(fs::remove_dir_all(&dst)); + build.run(Command::new("tar").arg("xf").arg(&tarball).current_dir(&out)); + + let mut configure = Command::new(obj.join("Configure")); + configure.arg(format!("--prefix={}", dst.display())); + configure.arg("no-dso"); + configure.arg("no-ssl2"); + configure.arg("no-ssl3"); + + let os = match &*target { + "aarch64-linux-android" => "linux-aarch64", + "aarch64-unknown-linux-gnu" => "linux-aarch64", + "arm-linux-androideabi" => "android", + "arm-unknown-linux-gnueabi" => "linux-armv4", + "arm-unknown-linux-gnueabihf" => "linux-armv4", + "armv7-linux-androideabi" => "android-armv7", + "armv7-unknown-linux-gnueabihf" => "linux-armv4", + "i686-apple-darwin" => "darwin-i386-cc", + "i686-linux-android" => "android-x86", + "i686-unknown-freebsd" => "BSD-x86-elf", + "i686-unknown-linux-gnu" => "linux-elf", + "i686-unknown-linux-musl" => "linux-elf", + "mips-unknown-linux-gnu" => "linux-mips32", + "mips64-unknown-linux-gnuabi64" => "linux64-mips64", + "mips64el-unknown-linux-gnuabi64" => "linux64-mips64", + "mipsel-unknown-linux-gnu" => "linux-mips32", + "powerpc-unknown-linux-gnu" => "linux-ppc", + "powerpc64-unknown-linux-gnu" => "linux-ppc64", + "powerpc64le-unknown-linux-gnu" => "linux-ppc64le", + "s390x-unknown-linux-gnu" => "linux64-s390x", + "x86_64-apple-darwin" => "darwin64-x86_64-cc", + "x86_64-linux-android" => "linux-x86_64", + "x86_64-unknown-freebsd" => "BSD-x86_64", + "x86_64-unknown-linux-gnu" => "linux-x86_64", + "x86_64-unknown-linux-musl" => "linux-x86_64", + "x86_64-unknown-netbsd" => "BSD-x86_64", + _ => panic!("don't know how to configure OpenSSL for {}", target), }; - let output = output(&mut shasum.arg(&tmp)); - let found = output.split_whitespace().next().unwrap(); - if found != OPENSSL_SHA256 { - panic!("downloaded openssl sha256 different\n\ - expected: {}\n\ - found: {}\n", OPENSSL_SHA256, found); + configure.arg(os); + configure.env("CC", build.cc(target)); + for flag in build.cflags(target) { + configure.arg(flag); } - t!(fs::rename(&tmp, &tarball)); - } - let obj = out.join(format!("openssl-{}", OPENSSL_VERS)); - let dst = build.openssl_install_dir(target).unwrap(); - drop(fs::remove_dir_all(&obj)); - drop(fs::remove_dir_all(&dst)); - build.run(Command::new("tar").arg("xf").arg(&tarball).current_dir(&out)); - - let mut configure = Command::new(obj.join("Configure")); - configure.arg(format!("--prefix={}", dst.display())); - 
configure.arg("no-dso"); - configure.arg("no-ssl2"); - configure.arg("no-ssl3"); - - let os = match target { - "aarch64-linux-android" => "linux-aarch64", - "aarch64-unknown-linux-gnu" => "linux-aarch64", - "arm-linux-androideabi" => "android", - "arm-unknown-linux-gnueabi" => "linux-armv4", - "arm-unknown-linux-gnueabihf" => "linux-armv4", - "armv7-linux-androideabi" => "android-armv7", - "armv7-unknown-linux-gnueabihf" => "linux-armv4", - "i686-apple-darwin" => "darwin-i386-cc", - "i686-linux-android" => "android-x86", - "i686-unknown-freebsd" => "BSD-x86-elf", - "i686-unknown-linux-gnu" => "linux-elf", - "i686-unknown-linux-musl" => "linux-elf", - "mips-unknown-linux-gnu" => "linux-mips32", - "mips64-unknown-linux-gnuabi64" => "linux64-mips64", - "mips64el-unknown-linux-gnuabi64" => "linux64-mips64", - "mipsel-unknown-linux-gnu" => "linux-mips32", - "powerpc-unknown-linux-gnu" => "linux-ppc", - "powerpc64-unknown-linux-gnu" => "linux-ppc64", - "powerpc64le-unknown-linux-gnu" => "linux-ppc64le", - "s390x-unknown-linux-gnu" => "linux64-s390x", - "x86_64-apple-darwin" => "darwin64-x86_64-cc", - "x86_64-linux-android" => "linux-x86_64", - "x86_64-unknown-freebsd" => "BSD-x86_64", - "x86_64-unknown-linux-gnu" => "linux-x86_64", - "x86_64-unknown-linux-musl" => "linux-x86_64", - "x86_64-unknown-netbsd" => "BSD-x86_64", - _ => panic!("don't know how to configure OpenSSL for {}", target), - }; - configure.arg(os); - configure.env("CC", build.cc(target)); - for flag in build.cflags(target) { - configure.arg(flag); - } - // There is no specific os target for android aarch64 or x86_64, - // so we need to pass some extra cflags - if target == "aarch64-linux-android" || target == "x86_64-linux-android" { - configure.arg("-mandroid"); - configure.arg("-fomit-frame-pointer"); - } - // Make PIE binaries - // Non-PIE linker support was removed in Lollipop - // https://source.android.com/security/enhancements/enhancements50 - if target == "i686-linux-android" { - configure.arg("no-asm"); + // There is no specific os target for android aarch64 or x86_64, + // so we need to pass some extra cflags + if target == "aarch64-linux-android" || target == "x86_64-linux-android" { + configure.arg("-mandroid"); + configure.arg("-fomit-frame-pointer"); + } + // Make PIE binaries + // Non-PIE linker support was removed in Lollipop + // https://source.android.com/security/enhancements/enhancements50 + if target == "i686-linux-android" { + configure.arg("no-asm"); + } + configure.current_dir(&obj); + println!("Configuring openssl for {}", target); + build.run_quiet(&mut configure); + println!("Building openssl for {}", target); + build.run_quiet(Command::new("make").arg("-j1").current_dir(&obj)); + println!("Installing openssl for {}", target); + build.run_quiet(Command::new("make").arg("install").current_dir(&obj)); + + let mut f = t!(File::create(&stamp)); + t!(f.write_all(OPENSSL_VERS.as_bytes())); } - configure.current_dir(&obj); - println!("Configuring openssl for {}", target); - build.run_quiet(&mut configure); - println!("Building openssl for {}", target); - build.run_quiet(Command::new("make").arg("-j1").current_dir(&obj)); - println!("Installing openssl for {}", target); - build.run_quiet(Command::new("make").arg("install").current_dir(&obj)); - - let mut f = t!(File::create(&stamp)); - t!(f.write_all(OPENSSL_VERS.as_bytes())); } diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs index a9c1b023dd..54208d8bb5 100644 --- a/src/bootstrap/sanity.rs +++ b/src/bootstrap/sanity.rs @@ -85,7 +85,7 @@ pub fn 
check(build: &mut Build) { } // We need cmake, but only if we're actually building LLVM or sanitizers. - let building_llvm = build.config.host.iter() + let building_llvm = build.hosts.iter() .filter_map(|host| build.config.target_config.get(host)) .any(|config| config.llvm_config.is_none()); if building_llvm || build.config.sanitizers { @@ -93,10 +93,27 @@ pub fn check(build: &mut Build) { } // Ninja is currently only used for LLVM itself. - // Some Linux distros rename `ninja` to `ninja-build`. - // CMake can work with either binary name. - if building_llvm && build.config.ninja && cmd_finder.maybe_have("ninja-build").is_none() { - cmd_finder.must_have("ninja"); + if building_llvm { + if build.config.ninja { + // Some Linux distros rename `ninja` to `ninja-build`. + // CMake can work with either binary name. + if cmd_finder.maybe_have("ninja-build").is_none() { + cmd_finder.must_have("ninja"); + } + } + + // If ninja isn't enabled but we're building for MSVC then we try + // doubly hard to enable it. It was realized in #43767 that the msbuild + // CMake generator for MSVC doesn't respect configuration options like + // disabling LLVM assertions, which can often be quite important! + // + // In these cases we automatically enable Ninja if we find it in the + // environment. + if !build.config.ninja && build.config.build.contains("msvc") { + if cmd_finder.maybe_have("ninja").is_some() { + build.config.ninja = true; + } + } } build.config.python = build.config.python.take().map(|p| cmd_finder.must_have(p)) @@ -114,7 +131,7 @@ pub fn check(build: &mut Build) { // We're gonna build some custom C code here and there, host triples // also build some C++ shims for LLVM so we need a C++ compiler. - for target in &build.config.target { + for target in &build.targets { // On emscripten we don't actually need the C compiler to just // build the target artifacts, only for testing. For the sake // of easier bot configuration, just skip detection. @@ -122,14 +139,14 @@ pub fn check(build: &mut Build) { continue; } - cmd_finder.must_have(build.cc(target)); - if let Some(ar) = build.ar(target) { + cmd_finder.must_have(build.cc(*target)); + if let Some(ar) = build.ar(*target) { cmd_finder.must_have(ar); } } - for host in build.config.host.iter() { - cmd_finder.must_have(build.cxx(host).unwrap()); + for host in &build.hosts { + cmd_finder.must_have(build.cxx(*host).unwrap()); // The msvc hosts don't use jemalloc, turn it off globally to // avoid packaging the dummy liballoc_jemalloc on that platform. 
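
The ninja/ninja-build handling in the hunk above comes down to probing PATH for one binary name before falling back to, or insisting on, another. A minimal sketch of that probe, assuming only the standard library; `find_in_path` is a hypothetical helper, whereas the real checks go through the `maybe_have`/`must_have` command finder used in sanity.rs:

    use std::env;
    use std::path::PathBuf;

    // Walk $PATH and return the first directory entry containing `cmd`.
    // (A Windows build would additionally need to try an .exe suffix.)
    fn find_in_path(cmd: &str) -> Option<PathBuf> {
        env::var_os("PATH").and_then(|path| {
            env::split_paths(&path)
                .map(|dir| dir.join(cmd))
                .find(|candidate| candidate.is_file())
        })
    }

    fn main() {
        // Prefer the renamed binary some distros ship, then the upstream name,
        // mirroring the ninja-build/ninja check above.
        let ninja = find_in_path("ninja-build").or_else(|| find_in_path("ninja"));
        println!("ninja found at: {:?}", ninja);
    }

The same probe-then-require idea is what allows the patch to auto-enable Ninja for MSVC builds only when it is actually present in the environment.
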
@@ -139,21 +156,28 @@ pub fn check(build: &mut Build) { } // Externally configured LLVM requires FileCheck to exist - let filecheck = build.llvm_filecheck(&build.build); + let filecheck = build.llvm_filecheck(build.build); if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests { panic!("FileCheck executable {:?} does not exist", filecheck); } - for target in &build.config.target { + for target in &build.targets { // Can't compile for iOS unless we're on macOS if target.contains("apple-ios") && !build.build.contains("apple-darwin") { panic!("the iOS target is only supported on macOS"); } - // Make sure musl-root is valid if specified + // Make sure musl-root is valid if target.contains("musl") && !target.contains("mips") { - match build.musl_root(target) { + // If this is a native target (host is also musl) and no musl-root is given, + // fall back to the system toolchain in /usr before giving up + if build.musl_root(*target).is_none() && build.config.build == *target { + let target = build.config.target_config.entry(target.clone()) + .or_insert(Default::default()); + target.musl_root = Some("/usr".into()); + } + match build.musl_root(*target) { Some(root) => { if fs::metadata(root.join("lib/libc.a")).is_err() { panic!("couldn't find libc.a in musl dir: {}", diff --git a/src/bootstrap/step.rs b/src/bootstrap/step.rs deleted file mode 100644 index e35031b3c9..0000000000 --- a/src/bootstrap/step.rs +++ /dev/null @@ -1,1817 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Definition of steps of the build system. -//! -//! This is where some of the real meat of rustbuild is located, in how we -//! define targets and the dependencies amongst them. This file can sort of be -//! viewed as just defining targets in a makefile which shell out to predefined -//! functions elsewhere about how to execute the target. -//! -//! The primary function here you're likely interested in is the `build_rules` -//! function. This will create a `Rules` structure which basically just lists -//! everything that rustbuild can do. Each rule has a human-readable name, a -//! path associated with it, some dependencies, and then a closure of how to -//! actually perform the rule. -//! -//! All steps below are defined in self-contained units, so adding a new target -//! to the build system should just involve adding the meta information here -//! along with the actual implementation elsewhere. You can find more comments -//! about how to define rules themselves below. - -use std::collections::{BTreeMap, HashSet, HashMap}; -use std::mem; -use std::path::PathBuf; -use std::process; - -use check::{self, TestKind}; -use compile; -use dist; -use doc; -use flags::Subcommand; -use install; -use native; -use {Compiler, Build, Mode}; - -pub fn run(build: &Build) { - let rules = build_rules(build); - let steps = rules.plan(); - rules.run(&steps); -} - -pub fn build_rules<'a>(build: &'a Build) -> Rules { - let mut rules = Rules::new(build); - - // This is the first rule that we're going to define for rustbuild, which is - // used to compile LLVM itself. All rules are added through the `rules` - // structure created above and are configured through a builder-style - // interface. 
- // - // First up we see the `build` method. This represents a rule that's part of - // the top-level `build` subcommand. For example `./x.py build` is what this - // is associating with. Note that this is normally only relevant if you flag - // a rule as `default`, which we'll talk about later. - // - // Next up we'll see two arguments to this method: - // - // * `llvm` - this is the "human readable" name of this target. This name is - // not accessed anywhere outside this file itself (e.g. not in - // the CLI nor elsewhere in rustbuild). The purpose of this is to - // easily define dependencies between rules. That is, other rules - // will depend on this with the name "llvm". - // * `src/llvm` - this is the relevant path to the rule that we're working - // with. This path is the engine behind how commands like - // `./x.py build src/llvm` work. This should typically point - // to the relevant component, but if there's not really a - // path to be assigned here you can pass something like - // `path/to/nowhere` to ignore it. - // - // After we create the rule with the `build` method we can then configure - // various aspects of it. For example this LLVM rule uses `.host(true)` to - // flag that it's a rule only for host targets. In other words, LLVM isn't - // compiled for targets configured through `--target` (e.g. those we're just - // building a standard library for). - // - // Next up the `dep` method will add a dependency to this rule. The closure - // is yielded the step that represents executing the `llvm` rule itself - // (containing information like stage, host, target, ...) and then it must - // return a target that the step depends on. Here LLVM is actually - // interesting where a cross-compiled LLVM depends on the host LLVM, but - // otherwise it has no dependencies. - // - // To handle this we do a bit of dynamic dispatch to see what the dependency - // is. If we're building a LLVM for the build triple, then we don't actually - // have any dependencies! To do that we return a dependency on the `Step::noop()` - // target which does nothing. - // - // If we're build a cross-compiled LLVM, however, we need to assemble the - // libraries from the previous compiler. This step has the same name as - // ours (llvm) but we want it for a different target, so we use the - // builder-style methods on `Step` to configure this target to the build - // triple. - // - // Finally, to finish off this rule, we define how to actually execute it. - // That logic is all defined in the `native` module so we just delegate to - // the relevant function there. The argument to the closure passed to `run` - // is a `Step` (defined below) which encapsulates information like the - // stage, target, host, etc. - rules.build("llvm", "src/llvm") - .host(true) - .dep(move |s| { - if s.target == build.build { - Step::noop() - } else { - s.target(&build.build) - } - }) - .run(move |s| native::llvm(build, s.target)); - - // Ok! After that example rule that's hopefully enough to explain what's - // going on here. You can check out the API docs below and also see a bunch - // more examples of rules directly below as well. 
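
The comment above describes the builder-style DSL the rest of this now-deleted file implements: each rule gets a human-readable name, dependencies derived from the step being executed, and a closure that does the work. The following self-contained sketch illustrates only that shape; `Step`, `Rule`, and `Rules` here are deliberately simplified stand-ins, not the real bootstrap types:

    use std::collections::BTreeMap;

    #[derive(Clone, Debug)]
    struct Step { name: &'static str, target: String }

    struct Rule {
        deps: Vec<&'static str>,   // names of rules that must run first
        run: Box<Fn(&Step)>,       // how to execute the rule for a given step
    }

    struct Rules { rules: BTreeMap<&'static str, Rule> }

    impl Rules {
        fn new() -> Rules { Rules { rules: BTreeMap::new() } }

        // Register a rule under a human-readable name, as `rules.build(...)` does.
        fn build<F: Fn(&Step) + 'static>(&mut self, name: &'static str,
                                         deps: Vec<&'static str>, run: F) {
            self.rules.insert(name, Rule { deps: deps, run: Box::new(run) });
        }

        // Execute a rule for `target`, running its dependencies first
        // (no cycle detection or deduplication in this toy version).
        fn run(&self, name: &'static str, target: &str) {
            for dep in &self.rules[name].deps {
                self.run(*dep, target);
            }
            let step = Step { name: name, target: target.to_string() };
            (self.rules[name].run)(&step);
        }
    }

    fn main() {
        let mut rules = Rules::new();
        rules.build("llvm", vec![],
                    |s: &Step| println!("building LLVM for {}", s.target));
        rules.build("rustc", vec!["llvm"],
                    |s: &Step| println!("building rustc for {}", s.target));
        rules.run("rustc", "x86_64-unknown-linux-gnu");
    }

Running this builds "llvm" before "rustc" for the requested target, the same dependency-then-run ordering that the real `Rules`/`Step` machinery below implements with proper topological sorting, default rules, and host/target handling.
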
- - // the compiler with no target libraries ready to go - rules.build("rustc", "src/rustc") - .dep(|s| s.name("create-sysroot").target(s.host)) - .dep(move |s| { - if s.stage == 0 { - Step::noop() - } else { - s.name("librustc") - .host(&build.build) - .stage(s.stage - 1) - } - }) - .run(move |s| compile::assemble_rustc(build, s.stage, s.target)); - - // Helper for loading an entire DAG of crates, rooted at `name` - let krates = |name: &str| { - let mut ret = Vec::new(); - let mut list = vec![name]; - let mut visited = HashSet::new(); - while let Some(krate) = list.pop() { - let default = krate == name; - let krate = &build.crates[krate]; - let path = krate.path.strip_prefix(&build.src) - // This handles out of tree paths - .unwrap_or(&krate.path); - ret.push((krate, path.to_str().unwrap(), default)); - for dep in krate.deps.iter() { - if visited.insert(dep) && dep != "build_helper" { - list.push(dep); - } - } - } - ret - }; - - // ======================================================================== - // Crate compilations - // - // Tools used during the build system but not shipped - rules.build("create-sysroot", "path/to/nowhere") - .run(move |s| compile::create_sysroot(build, &s.compiler())); - - // These rules are "pseudo rules" that don't actually do any work - // themselves, but represent a complete sysroot with the relevant compiler - // linked into place. - // - // That is, depending on "libstd" means that when the rule is completed then - // the `stage` sysroot for the compiler `host` will be available with a - // standard library built for `target` linked in place. Not all rules need - // the compiler itself to be available, just the standard library, so - // there's a distinction between the two. - rules.build("libstd", "src/libstd") - .dep(|s| s.name("rustc").target(s.host)) - .dep(|s| s.name("libstd-link")); - rules.build("libtest", "src/libtest") - .dep(|s| s.name("libstd")) - .dep(|s| s.name("libtest-link")) - .default(true); - rules.build("librustc", "src/librustc") - .dep(|s| s.name("libtest")) - .dep(|s| s.name("librustc-link")) - .host(true) - .default(true); - - // Helper method to define the rules to link a crate into its place in the - // sysroot. - // - // The logic here is a little subtle as there's a few cases to consider. - // Not all combinations of (stage, host, target) actually require something - // to be compiled, but rather libraries could get propagated from a - // different location. For example: - // - // * Any crate with a `host` that's not the build triple will not actually - // compile something. A different `host` means that the build triple will - // actually compile the libraries, and then we'll copy them over from the - // build triple to the `host` directory. - // - // * Some crates aren't even compiled by the build triple, but may be copied - // from previous stages. For example if we're not doing a full bootstrap - // then we may just depend on the stage1 versions of libraries to be - // available to get linked forward. - // - // * Finally, there are some cases, however, which do indeed comiple crates - // and link them into place afterwards. - // - // The rule definition below mirrors these three cases. The `dep` method - // calculates the correct dependency which either comes from stage1, a - // different compiler, or from actually building the crate itself (the `dep` - // rule). The `run` rule then mirrors these three cases and links the cases - // forward into the compiler sysroot specified from the correct location. 
- fn crate_rule<'a, 'b>(build: &'a Build, - rules: &'b mut Rules<'a>, - krate: &'a str, - dep: &'a str, - link: fn(&Build, &Compiler, &Compiler, &str)) - -> RuleBuilder<'a, 'b> { - let mut rule = rules.build(&krate, "path/to/nowhere"); - rule.dep(move |s| { - if build.force_use_stage1(&s.compiler(), s.target) { - s.host(&build.build).stage(1) - } else if s.host == build.build { - s.name(dep) - } else { - s.host(&build.build) - } - }) - .run(move |s| { - if build.force_use_stage1(&s.compiler(), s.target) { - link(build, - &s.stage(1).host(&build.build).compiler(), - &s.compiler(), - s.target) - } else if s.host == build.build { - link(build, &s.compiler(), &s.compiler(), s.target) - } else { - link(build, - &s.host(&build.build).compiler(), - &s.compiler(), - s.target) - } - }); - rule - } - - // Similar to the `libstd`, `libtest`, and `librustc` rules above, except - // these rules only represent the libraries being available in the sysroot, - // not the compiler itself. This is done as not all rules need a compiler in - // the sysroot, but may just need the libraries. - // - // All of these rules use the helper definition above. - crate_rule(build, - &mut rules, - "libstd-link", - "build-crate-std", - compile::std_link) - .dep(|s| s.name("startup-objects")) - .dep(|s| s.name("create-sysroot").target(s.host)); - crate_rule(build, - &mut rules, - "libtest-link", - "build-crate-test", - compile::test_link) - .dep(|s| s.name("libstd-link")); - crate_rule(build, - &mut rules, - "librustc-link", - "build-crate-rustc-main", - compile::rustc_link) - .dep(|s| s.name("libtest-link")); - - for (krate, path, _default) in krates("std") { - rules.build(&krate.build_step, path) - .dep(|s| s.name("startup-objects")) - .dep(move |s| s.name("rustc").host(&build.build).target(s.host)) - .run(move |s| compile::std(build, s.target, &s.compiler())); - } - for (krate, path, _default) in krates("test") { - rules.build(&krate.build_step, path) - .dep(|s| s.name("libstd-link")) - .run(move |s| compile::test(build, s.target, &s.compiler())); - } - for (krate, path, _default) in krates("rustc-main") { - rules.build(&krate.build_step, path) - .dep(|s| s.name("libtest-link")) - .dep(move |s| s.name("llvm").host(&build.build).stage(0)) - .dep(|s| s.name("may-run-build-script")) - .run(move |s| compile::rustc(build, s.target, &s.compiler())); - } - - // Crates which have build scripts need to rely on this rule to ensure that - // the necessary prerequisites for a build script are linked and located in - // place. 
- rules.build("may-run-build-script", "path/to/nowhere") - .dep(move |s| { - s.name("libstd-link") - .host(&build.build) - .target(&build.build) - }); - rules.build("startup-objects", "src/rtstartup") - .dep(|s| s.name("create-sysroot").target(s.host)) - .run(move |s| compile::build_startup_objects(build, &s.compiler(), s.target)); - - // ======================================================================== - // Test targets - // - // Various unit tests and tests suites we can run - { - let mut suite = |name, path, mode, dir| { - rules.test(name, path) - .dep(|s| s.name("libtest")) - .dep(|s| s.name("tool-compiletest").target(s.host).stage(0)) - .dep(|s| s.name("test-helpers")) - .dep(|s| s.name("remote-copy-libs")) - .default(mode != "pretty") // pretty tests don't run everywhere - .run(move |s| { - check::compiletest(build, &s.compiler(), s.target, mode, dir) - }); - }; - - suite("check-ui", "src/test/ui", "ui", "ui"); - suite("check-rpass", "src/test/run-pass", "run-pass", "run-pass"); - suite("check-cfail", "src/test/compile-fail", "compile-fail", "compile-fail"); - suite("check-pfail", "src/test/parse-fail", "parse-fail", "parse-fail"); - suite("check-rfail", "src/test/run-fail", "run-fail", "run-fail"); - suite("check-rpass-valgrind", "src/test/run-pass-valgrind", - "run-pass-valgrind", "run-pass-valgrind"); - suite("check-mir-opt", "src/test/mir-opt", "mir-opt", "mir-opt"); - if build.config.codegen_tests { - suite("check-codegen", "src/test/codegen", "codegen", "codegen"); - } - suite("check-codegen-units", "src/test/codegen-units", "codegen-units", - "codegen-units"); - suite("check-incremental", "src/test/incremental", "incremental", - "incremental"); - } - - if build.build.contains("msvc") { - // nothing to do for debuginfo tests - } else { - rules.test("check-debuginfo-lldb", "src/test/debuginfo-lldb") - .dep(|s| s.name("libtest")) - .dep(|s| s.name("tool-compiletest").target(s.host).stage(0)) - .dep(|s| s.name("test-helpers")) - .dep(|s| s.name("debugger-scripts")) - .run(move |s| check::compiletest(build, &s.compiler(), s.target, - "debuginfo-lldb", "debuginfo")); - rules.test("check-debuginfo-gdb", "src/test/debuginfo-gdb") - .dep(|s| s.name("libtest")) - .dep(|s| s.name("tool-compiletest").target(s.host).stage(0)) - .dep(|s| s.name("test-helpers")) - .dep(|s| s.name("debugger-scripts")) - .dep(|s| s.name("remote-copy-libs")) - .run(move |s| check::compiletest(build, &s.compiler(), s.target, - "debuginfo-gdb", "debuginfo")); - let mut rule = rules.test("check-debuginfo", "src/test/debuginfo"); - rule.default(true); - if build.build.contains("apple") { - rule.dep(|s| s.name("check-debuginfo-lldb")); - } else { - rule.dep(|s| s.name("check-debuginfo-gdb")); - } - } - - rules.test("debugger-scripts", "src/etc/lldb_batchmode.py") - .run(move |s| dist::debugger_scripts(build, &build.sysroot(&s.compiler()), - s.target)); - - { - let mut suite = |name, path, mode, dir| { - rules.test(name, path) - .dep(|s| s.name("librustc")) - .dep(|s| s.name("test-helpers")) - .dep(|s| s.name("tool-compiletest").target(s.host).stage(0)) - .default(mode != "pretty") - .host(true) - .run(move |s| { - check::compiletest(build, &s.compiler(), s.target, mode, dir) - }); - }; - - suite("check-ui-full", "src/test/ui-fulldeps", "ui", "ui-fulldeps"); - suite("check-rpass-full", "src/test/run-pass-fulldeps", - "run-pass", "run-pass-fulldeps"); - suite("check-rfail-full", "src/test/run-fail-fulldeps", - "run-fail", "run-fail-fulldeps"); - suite("check-cfail-full", "src/test/compile-fail-fulldeps", - 
"compile-fail", "compile-fail-fulldeps"); - suite("check-rmake", "src/test/run-make", "run-make", "run-make"); - suite("check-rustdoc", "src/test/rustdoc", "rustdoc", "rustdoc"); - suite("check-pretty", "src/test/pretty", "pretty", "pretty"); - suite("check-pretty-rpass", "src/test/run-pass/pretty", "pretty", - "run-pass"); - suite("check-pretty-rfail", "src/test/run-fail/pretty", "pretty", - "run-fail"); - suite("check-pretty-valgrind", "src/test/run-pass-valgrind/pretty", "pretty", - "run-pass-valgrind"); - suite("check-pretty-rpass-full", "src/test/run-pass-fulldeps/pretty", - "pretty", "run-pass-fulldeps"); - suite("check-pretty-rfail-full", "src/test/run-fail-fulldeps/pretty", - "pretty", "run-fail-fulldeps"); - } - - for (krate, path, _default) in krates("std") { - rules.test(&krate.test_step, path) - .dep(|s| s.name("libtest")) - .dep(|s| s.name("remote-copy-libs")) - .run(move |s| check::krate(build, &s.compiler(), s.target, - Mode::Libstd, TestKind::Test, - Some(&krate.name))); - } - rules.test("check-std-all", "path/to/nowhere") - .dep(|s| s.name("libtest")) - .dep(|s| s.name("remote-copy-libs")) - .default(true) - .run(move |s| check::krate(build, &s.compiler(), s.target, - Mode::Libstd, TestKind::Test, None)); - - // std benchmarks - for (krate, path, _default) in krates("std") { - rules.bench(&krate.bench_step, path) - .dep(|s| s.name("libtest")) - .dep(|s| s.name("remote-copy-libs")) - .run(move |s| check::krate(build, &s.compiler(), s.target, - Mode::Libstd, TestKind::Bench, - Some(&krate.name))); - } - rules.bench("bench-std-all", "path/to/nowhere") - .dep(|s| s.name("libtest")) - .dep(|s| s.name("remote-copy-libs")) - .default(true) - .run(move |s| check::krate(build, &s.compiler(), s.target, - Mode::Libstd, TestKind::Bench, None)); - - for (krate, path, _default) in krates("test") { - rules.test(&krate.test_step, path) - .dep(|s| s.name("libtest")) - .dep(|s| s.name("remote-copy-libs")) - .run(move |s| check::krate(build, &s.compiler(), s.target, - Mode::Libtest, TestKind::Test, - Some(&krate.name))); - } - rules.test("check-test-all", "path/to/nowhere") - .dep(|s| s.name("libtest")) - .dep(|s| s.name("remote-copy-libs")) - .default(true) - .run(move |s| check::krate(build, &s.compiler(), s.target, - Mode::Libtest, TestKind::Test, None)); - for (krate, path, _default) in krates("rustc-main") { - rules.test(&krate.test_step, path) - .dep(|s| s.name("librustc")) - .dep(|s| s.name("remote-copy-libs")) - .host(true) - .run(move |s| check::krate(build, &s.compiler(), s.target, - Mode::Librustc, TestKind::Test, - Some(&krate.name))); - } - rules.test("check-rustc-all", "path/to/nowhere") - .dep(|s| s.name("librustc")) - .dep(|s| s.name("remote-copy-libs")) - .default(true) - .host(true) - .run(move |s| check::krate(build, &s.compiler(), s.target, - Mode::Librustc, TestKind::Test, None)); - - rules.test("check-linkchecker", "src/tools/linkchecker") - .dep(|s| s.name("tool-linkchecker").stage(0)) - .dep(|s| s.name("default:doc")) - .default(build.config.docs) - .host(true) - .run(move |s| check::linkcheck(build, s.target)); - rules.test("check-cargotest", "src/tools/cargotest") - .dep(|s| s.name("tool-cargotest").stage(0)) - .dep(|s| s.name("librustc")) - .host(true) - .run(move |s| check::cargotest(build, s.stage, s.target)); - rules.test("check-cargo", "src/tools/cargo") - .dep(|s| s.name("tool-cargo")) - .host(true) - .run(move |s| check::cargo(build, s.stage, s.target)); - rules.test("check-rls", "src/tools/rls") - .dep(|s| s.name("tool-rls")) - .host(true) - .run(move |s| 
check::rls(build, s.stage, s.target)); - rules.test("check-tidy", "src/tools/tidy") - .dep(|s| s.name("tool-tidy").stage(0)) - .default(true) - .host(true) - .only_build(true) - .run(move |s| check::tidy(build, s.target)); - rules.test("check-error-index", "src/tools/error_index_generator") - .dep(|s| s.name("libstd")) - .dep(|s| s.name("tool-error-index").host(s.host).stage(0)) - .default(true) - .host(true) - .run(move |s| check::error_index(build, &s.compiler())); - rules.test("check-docs", "src/doc") - .dep(|s| s.name("libtest")) - .default(true) - .host(true) - .run(move |s| check::docs(build, &s.compiler())); - rules.test("check-distcheck", "distcheck") - .dep(|s| s.name("dist-plain-source-tarball")) - .dep(|s| s.name("dist-src")) - .run(move |_| check::distcheck(build)); - - rules.build("test-helpers", "src/rt/rust_test_helpers.c") - .run(move |s| native::test_helpers(build, s.target)); - rules.build("openssl", "path/to/nowhere") - .run(move |s| native::openssl(build, s.target)); - - // Some test suites are run inside emulators or on remote devices, and most - // of our test binaries are linked dynamically which means we need to ship - // the standard library and such to the emulator ahead of time. This step - // represents this and is a dependency of all test suites. - // - // Most of the time this step is a noop (the `check::emulator_copy_libs` - // only does work if necessary). For some steps such as shipping data to - // QEMU we have to build our own tools so we've got conditional dependencies - // on those programs as well. Note that the remote test client is built for - // the build target (us) and the server is built for the target. - rules.test("remote-copy-libs", "path/to/nowhere") - .dep(|s| s.name("libtest")) - .dep(move |s| { - if build.remote_tested(s.target) { - s.name("tool-remote-test-client").target(s.host).stage(0) - } else { - Step::noop() - } - }) - .dep(move |s| { - if build.remote_tested(s.target) { - s.name("tool-remote-test-server") - } else { - Step::noop() - } - }) - .run(move |s| check::remote_copy_libs(build, &s.compiler(), s.target)); - - rules.test("check-bootstrap", "src/bootstrap") - .default(true) - .host(true) - .only_build(true) - .run(move |_| check::bootstrap(build)); - - // ======================================================================== - // Build tools - // - // Tools used during the build system but not shipped - rules.build("tool-rustbook", "src/tools/rustbook") - .dep(|s| s.name("maybe-clean-tools")) - .dep(|s| s.name("librustc-tool")) - .run(move |s| compile::tool(build, s.stage, s.target, "rustbook")); - rules.build("tool-error-index", "src/tools/error_index_generator") - .dep(|s| s.name("maybe-clean-tools")) - .dep(|s| s.name("librustc-tool")) - .run(move |s| compile::tool(build, s.stage, s.target, "error_index_generator")); - rules.build("tool-unstable-book-gen", "src/tools/unstable-book-gen") - .dep(|s| s.name("maybe-clean-tools")) - .dep(|s| s.name("libstd-tool")) - .run(move |s| compile::tool(build, s.stage, s.target, "unstable-book-gen")); - rules.build("tool-tidy", "src/tools/tidy") - .dep(|s| s.name("maybe-clean-tools")) - .dep(|s| s.name("libstd-tool")) - .run(move |s| compile::tool(build, s.stage, s.target, "tidy")); - rules.build("tool-linkchecker", "src/tools/linkchecker") - .dep(|s| s.name("maybe-clean-tools")) - .dep(|s| s.name("libstd-tool")) - .run(move |s| compile::tool(build, s.stage, s.target, "linkchecker")); - rules.build("tool-cargotest", "src/tools/cargotest") - .dep(|s| s.name("maybe-clean-tools")) - 
.dep(|s| s.name("libstd-tool")) - .run(move |s| compile::tool(build, s.stage, s.target, "cargotest")); - rules.build("tool-compiletest", "src/tools/compiletest") - .dep(|s| s.name("maybe-clean-tools")) - .dep(|s| s.name("libtest-tool")) - .run(move |s| compile::tool(build, s.stage, s.target, "compiletest")); - rules.build("tool-build-manifest", "src/tools/build-manifest") - .dep(|s| s.name("maybe-clean-tools")) - .dep(|s| s.name("libstd-tool")) - .run(move |s| compile::tool(build, s.stage, s.target, "build-manifest")); - rules.build("tool-remote-test-server", "src/tools/remote-test-server") - .dep(|s| s.name("maybe-clean-tools")) - .dep(|s| s.name("libstd-tool")) - .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-server")); - rules.build("tool-remote-test-client", "src/tools/remote-test-client") - .dep(|s| s.name("maybe-clean-tools")) - .dep(|s| s.name("libstd-tool")) - .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-client")); - rules.build("tool-rust-installer", "src/tools/rust-installer") - .dep(|s| s.name("maybe-clean-tools")) - .dep(|s| s.name("libstd-tool")) - .run(move |s| compile::tool(build, s.stage, s.target, "rust-installer")); - rules.build("tool-cargo", "src/tools/cargo") - .host(true) - .default(build.config.extended) - .dep(|s| s.name("maybe-clean-tools")) - .dep(|s| s.name("libstd-tool")) - .dep(|s| s.stage(0).host(s.target).name("openssl")) - .dep(move |s| { - // Cargo depends on procedural macros, which requires a full host - // compiler to be available, so we need to depend on that. - s.name("librustc-link") - .target(&build.build) - .host(&build.build) - }) - .run(move |s| compile::tool(build, s.stage, s.target, "cargo")); - rules.build("tool-rls", "src/tools/rls") - .host(true) - .dep(|s| s.name("librustc-tool")) - .dep(|s| s.stage(0).host(s.target).name("openssl")) - .dep(move |s| { - // rls, like cargo, uses procedural macros - s.name("librustc-link") - .target(&build.build) - .host(&build.build) - }) - .run(move |s| compile::tool(build, s.stage, s.target, "rls")); - - // "pseudo rule" which represents completely cleaning out the tools dir in - // one stage. This needs to happen whenever a dependency changes (e.g. - // libstd, libtest, librustc) and all of the tool compilations above will - // be sequenced after this rule. 
- rules.build("maybe-clean-tools", "path/to/nowhere") - .after("librustc-tool") - .after("libtest-tool") - .after("libstd-tool"); - - rules.build("librustc-tool", "path/to/nowhere") - .dep(|s| s.name("librustc")) - .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Librustc)); - rules.build("libtest-tool", "path/to/nowhere") - .dep(|s| s.name("libtest")) - .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Libtest)); - rules.build("libstd-tool", "path/to/nowhere") - .dep(|s| s.name("libstd")) - .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Libstd)); - - // ======================================================================== - // Documentation targets - rules.doc("doc-book", "src/doc/book") - .dep(move |s| { - s.name("tool-rustbook") - .host(&build.build) - .target(&build.build) - .stage(0) - }) - .default(build.config.docs) - .run(move |s| doc::book(build, s.target, "book")); - rules.doc("doc-nomicon", "src/doc/nomicon") - .dep(move |s| { - s.name("tool-rustbook") - .host(&build.build) - .target(&build.build) - .stage(0) - }) - .default(build.config.docs) - .run(move |s| doc::rustbook(build, s.target, "nomicon")); - rules.doc("doc-reference", "src/doc/reference") - .dep(move |s| { - s.name("tool-rustbook") - .host(&build.build) - .target(&build.build) - .stage(0) - }) - .default(build.config.docs) - .run(move |s| doc::rustbook(build, s.target, "reference")); - rules.doc("doc-unstable-book", "src/doc/unstable-book") - .dep(move |s| { - s.name("tool-rustbook") - .host(&build.build) - .target(&build.build) - .stage(0) - }) - .dep(move |s| s.name("doc-unstable-book-gen")) - .default(build.config.docs) - .run(move |s| doc::rustbook_src(build, - s.target, - "unstable-book", - &build.md_doc_out(s.target))); - rules.doc("doc-standalone", "src/doc") - .dep(move |s| { - s.name("rustc") - .host(&build.build) - .target(&build.build) - .stage(0) - }) - .default(build.config.docs) - .run(move |s| doc::standalone(build, s.target)); - rules.doc("doc-error-index", "src/tools/error_index_generator") - .dep(move |s| s.name("tool-error-index").target(&build.build).stage(0)) - .dep(move |s| s.name("librustc-link")) - .default(build.config.docs) - .host(true) - .run(move |s| doc::error_index(build, s.target)); - rules.doc("doc-unstable-book-gen", "src/tools/unstable-book-gen") - .dep(move |s| { - s.name("tool-unstable-book-gen") - .host(&build.build) - .target(&build.build) - .stage(0) - }) - .dep(move |s| s.name("libstd-link")) - .default(build.config.docs) - .host(true) - .run(move |s| doc::unstable_book_gen(build, s.target)); - for (krate, path, default) in krates("std") { - rules.doc(&krate.doc_step, path) - .dep(|s| s.name("libstd-link")) - .default(default && build.config.docs) - .run(move |s| doc::std(build, s.stage, s.target)); - } - for (krate, path, default) in krates("test") { - rules.doc(&krate.doc_step, path) - .dep(|s| s.name("libtest-link")) - // Needed so rustdoc generates relative links to std. - .dep(|s| s.name("doc-crate-std")) - .default(default && build.config.compiler_docs) - .run(move |s| doc::test(build, s.stage, s.target)); - } - for (krate, path, default) in krates("rustc-main") { - rules.doc(&krate.doc_step, path) - .dep(|s| s.name("librustc-link")) - // Needed so rustdoc generates relative links to std. 
- .dep(|s| s.name("doc-crate-std")) - .host(true) - .default(default && build.config.docs) - .run(move |s| doc::rustc(build, s.stage, s.target)); - } - - // ======================================================================== - // Distribution targets - rules.dist("dist-rustc", "src/librustc") - .dep(move |s| s.name("rustc").host(&build.build)) - .host(true) - .only_host_build(true) - .default(true) - .dep(move |s| tool_rust_installer(build, s)) - .run(move |s| dist::rustc(build, s.stage, s.target)); - rules.dist("dist-std", "src/libstd") - .dep(move |s| { - // We want to package up as many target libraries as possible - // for the `rust-std` package, so if this is a host target we - // depend on librustc and otherwise we just depend on libtest. - if build.config.host.iter().any(|t| t == s.target) { - s.name("librustc-link") - } else { - s.name("libtest-link") - } - }) - .default(true) - .only_host_build(true) - .dep(move |s| tool_rust_installer(build, s)) - .run(move |s| dist::std(build, &s.compiler(), s.target)); - rules.dist("dist-mingw", "path/to/nowhere") - .default(true) - .only_host_build(true) - .dep(move |s| tool_rust_installer(build, s)) - .run(move |s| { - if s.target.contains("pc-windows-gnu") { - dist::mingw(build, s.target) - } - }); - rules.dist("dist-plain-source-tarball", "src") - .default(build.config.rust_dist_src) - .host(true) - .only_build(true) - .only_host_build(true) - .dep(move |s| tool_rust_installer(build, s)) - .run(move |_| dist::plain_source_tarball(build)); - rules.dist("dist-src", "src") - .default(true) - .host(true) - .only_build(true) - .only_host_build(true) - .dep(move |s| tool_rust_installer(build, s)) - .run(move |_| dist::rust_src(build)); - rules.dist("dist-docs", "src/doc") - .default(true) - .only_host_build(true) - .dep(|s| s.name("default:doc")) - .dep(move |s| tool_rust_installer(build, s)) - .run(move |s| dist::docs(build, s.stage, s.target)); - rules.dist("dist-analysis", "analysis") - .default(build.config.extended) - .dep(|s| s.name("dist-std")) - .only_host_build(true) - .dep(move |s| tool_rust_installer(build, s)) - .run(move |s| dist::analysis(build, &s.compiler(), s.target)); - rules.dist("dist-rls", "rls") - .host(true) - .only_host_build(true) - .dep(|s| s.name("tool-rls")) - .dep(move |s| tool_rust_installer(build, s)) - .run(move |s| dist::rls(build, s.stage, s.target)); - rules.dist("dist-cargo", "cargo") - .host(true) - .only_host_build(true) - .dep(|s| s.name("tool-cargo")) - .dep(move |s| tool_rust_installer(build, s)) - .run(move |s| dist::cargo(build, s.stage, s.target)); - rules.dist("dist-extended", "extended") - .default(build.config.extended) - .host(true) - .only_host_build(true) - .dep(|d| d.name("dist-std")) - .dep(|d| d.name("dist-rustc")) - .dep(|d| d.name("dist-mingw")) - .dep(|d| d.name("dist-docs")) - .dep(|d| d.name("dist-cargo")) - .dep(|d| d.name("dist-analysis")) - .dep(move |s| tool_rust_installer(build, s)) - .run(move |s| dist::extended(build, s.stage, s.target)); - - rules.dist("dist-sign", "hash-and-sign") - .host(true) - .only_build(true) - .only_host_build(true) - .dep(move |s| s.name("tool-build-manifest").target(&build.build).stage(0)) - .run(move |_| dist::hash_and_sign(build)); - - rules.install("install-docs", "src/doc") - .default(build.config.docs) - .only_host_build(true) - .dep(|s| s.name("dist-docs")) - .run(move |s| install::Installer::new(build).install_docs(s.stage, s.target)); - rules.install("install-std", "src/libstd") - .default(true) - .only_host_build(true) - .dep(|s| 
s.name("dist-std")) - .run(move |s| install::Installer::new(build).install_std(s.stage)); - rules.install("install-cargo", "cargo") - .default(build.config.extended) - .host(true) - .only_host_build(true) - .dep(|s| s.name("dist-cargo")) - .run(move |s| install::Installer::new(build).install_cargo(s.stage, s.target)); - rules.install("install-rls", "rls") - .host(true) - .only_host_build(true) - .dep(|s| s.name("dist-rls")) - .run(move |s| install::Installer::new(build).install_rls(s.stage, s.target)); - rules.install("install-analysis", "analysis") - .default(build.config.extended) - .only_host_build(true) - .dep(|s| s.name("dist-analysis")) - .run(move |s| install::Installer::new(build).install_analysis(s.stage, s.target)); - rules.install("install-src", "src") - .default(build.config.extended) - .host(true) - .only_build(true) - .only_host_build(true) - .dep(|s| s.name("dist-src")) - .run(move |s| install::Installer::new(build).install_src(s.stage)); - rules.install("install-rustc", "src/librustc") - .default(true) - .host(true) - .only_host_build(true) - .dep(|s| s.name("dist-rustc")) - .run(move |s| install::Installer::new(build).install_rustc(s.stage, s.target)); - - rules.verify(); - return rules; - - /// Helper to depend on a stage0 build-only rust-installer tool. - fn tool_rust_installer<'a>(build: &'a Build, step: &Step<'a>) -> Step<'a> { - step.name("tool-rust-installer") - .host(&build.build) - .target(&build.build) - .stage(0) - } -} - -#[derive(PartialEq, Eq, Hash, Clone, Debug)] -struct Step<'a> { - /// Human readable name of the rule this step is executing. Possible names - /// are all defined above in `build_rules`. - name: &'a str, - - /// The stage this step is executing in. This is typically 0, 1, or 2. - stage: u32, - - /// This step will likely involve a compiler, and the target that compiler - /// itself is built for is called the host, this variable. Typically this is - /// the target of the build machine itself. - host: &'a str, - - /// The target that this step represents generating. If you're building a - /// standard library for a new suite of targets, for example, this'll be set - /// to those targets. - target: &'a str, -} - -impl<'a> Step<'a> { - fn noop() -> Step<'a> { - Step { name: "", stage: 0, host: "", target: "" } - } - - /// Creates a new step which is the same as this, except has a new name. - fn name(&self, name: &'a str) -> Step<'a> { - Step { name: name, ..*self } - } - - /// Creates a new step which is the same as this, except has a new stage. - fn stage(&self, stage: u32) -> Step<'a> { - Step { stage: stage, ..*self } - } - - /// Creates a new step which is the same as this, except has a new host. - fn host(&self, host: &'a str) -> Step<'a> { - Step { host: host, ..*self } - } - - /// Creates a new step which is the same as this, except has a new target. - fn target(&self, target: &'a str) -> Step<'a> { - Step { target: target, ..*self } - } - - /// Returns the `Compiler` structure that this step corresponds to. - fn compiler(&self) -> Compiler<'a> { - Compiler::new(self.stage, self.host) - } -} - -struct Rule<'a> { - /// The human readable name of this target, defined in `build_rules`. - name: &'a str, - - /// The path associated with this target, used in the `./x.py` driver for - /// easy and ergonomic specification of what to do. - path: &'a str, - - /// The "kind" of top-level command that this rule is associated with, only - /// relevant if this is a default rule. - kind: Kind, - - /// List of dependencies this rule has. 
Each dependency is a function from a - /// step that's being executed to another step that should be executed. - deps: Vec) -> Step<'a> + 'a>>, - - /// How to actually execute this rule. Takes a step with contextual - /// information and then executes it. - run: Box) + 'a>, - - /// Whether or not this is a "default" rule. That basically means that if - /// you run, for example, `./x.py test` whether it's included or not. - default: bool, - - /// Whether or not this is a "host" rule, or in other words whether this is - /// only intended for compiler hosts and not for targets that are being - /// generated. - host: bool, - - /// Whether this rule is only for steps where the host is the build triple, - /// not anything in hosts or targets. - only_host_build: bool, - - /// Whether this rule is only for the build triple, not anything in hosts or - /// targets. - only_build: bool, - - /// A list of "order only" dependencies. This rules does not actually - /// depend on these rules, but if they show up in the dependency graph then - /// this rule must be executed after all these rules. - after: Vec<&'a str>, -} - -#[derive(PartialEq)] -enum Kind { - Build, - Test, - Bench, - Dist, - Doc, - Install, -} - -impl<'a> Rule<'a> { - fn new(name: &'a str, path: &'a str, kind: Kind) -> Rule<'a> { - Rule { - name: name, - deps: Vec::new(), - run: Box::new(|_| ()), - path: path, - kind: kind, - default: false, - host: false, - only_host_build: false, - only_build: false, - after: Vec::new(), - } - } -} - -/// Builder pattern returned from the various methods on `Rules` which will add -/// the rule to the internal list on `Drop`. -struct RuleBuilder<'a: 'b, 'b> { - rules: &'b mut Rules<'a>, - rule: Rule<'a>, -} - -impl<'a, 'b> RuleBuilder<'a, 'b> { - fn dep(&mut self, f: F) -> &mut Self - where F: Fn(&Step<'a>) -> Step<'a> + 'a, - { - self.rule.deps.push(Box::new(f)); - self - } - - fn after(&mut self, step: &'a str) -> &mut Self { - self.rule.after.push(step); - self - } - - fn run(&mut self, f: F) -> &mut Self - where F: Fn(&Step<'a>) + 'a, - { - self.rule.run = Box::new(f); - self - } - - fn default(&mut self, default: bool) -> &mut Self { - self.rule.default = default; - self - } - - fn host(&mut self, host: bool) -> &mut Self { - self.rule.host = host; - self - } - - fn only_build(&mut self, only_build: bool) -> &mut Self { - self.rule.only_build = only_build; - self - } - - fn only_host_build(&mut self, only_host_build: bool) -> &mut Self { - self.rule.only_host_build = only_host_build; - self - } -} - -impl<'a, 'b> Drop for RuleBuilder<'a, 'b> { - fn drop(&mut self) { - let rule = mem::replace(&mut self.rule, Rule::new("", "", Kind::Build)); - let prev = self.rules.rules.insert(rule.name, rule); - if let Some(prev) = prev { - panic!("duplicate rule named: {}", prev.name); - } - } -} - -pub struct Rules<'a> { - build: &'a Build, - sbuild: Step<'a>, - rules: BTreeMap<&'a str, Rule<'a>>, -} - -impl<'a> Rules<'a> { - fn new(build: &'a Build) -> Rules<'a> { - Rules { - build: build, - sbuild: Step { - stage: build.flags.stage.unwrap_or(2), - target: &build.build, - host: &build.build, - name: "", - }, - rules: BTreeMap::new(), - } - } - - /// Creates a new rule of `Kind::Build` with the specified human readable - /// name and path associated with it. - /// - /// The builder returned should be configured further with information such - /// as how to actually run this rule. 
- fn build<'b>(&'b mut self, name: &'a str, path: &'a str) - -> RuleBuilder<'a, 'b> { - self.rule(name, path, Kind::Build) - } - - /// Same as `build`, but for `Kind::Test`. - fn test<'b>(&'b mut self, name: &'a str, path: &'a str) - -> RuleBuilder<'a, 'b> { - self.rule(name, path, Kind::Test) - } - - /// Same as `build`, but for `Kind::Bench`. - fn bench<'b>(&'b mut self, name: &'a str, path: &'a str) - -> RuleBuilder<'a, 'b> { - self.rule(name, path, Kind::Bench) - } - - /// Same as `build`, but for `Kind::Doc`. - fn doc<'b>(&'b mut self, name: &'a str, path: &'a str) - -> RuleBuilder<'a, 'b> { - self.rule(name, path, Kind::Doc) - } - - /// Same as `build`, but for `Kind::Dist`. - fn dist<'b>(&'b mut self, name: &'a str, path: &'a str) - -> RuleBuilder<'a, 'b> { - self.rule(name, path, Kind::Dist) - } - - /// Same as `build`, but for `Kind::Install`. - fn install<'b>(&'b mut self, name: &'a str, path: &'a str) - -> RuleBuilder<'a, 'b> { - self.rule(name, path, Kind::Install) - } - - fn rule<'b>(&'b mut self, - name: &'a str, - path: &'a str, - kind: Kind) -> RuleBuilder<'a, 'b> { - RuleBuilder { - rules: self, - rule: Rule::new(name, path, kind), - } - } - - /// Verify the dependency graph defined by all our rules are correct, e.g. - /// everything points to a valid something else. - fn verify(&self) { - for rule in self.rules.values() { - for dep in rule.deps.iter() { - let dep = dep(&self.sbuild.name(rule.name)); - if self.rules.contains_key(&dep.name) || dep.name.starts_with("default:") { - continue - } - if dep == Step::noop() { - continue - } - panic!("\ - -invalid rule dependency graph detected, was a rule added and maybe typo'd? - - `{}` depends on `{}` which does not exist - -", rule.name, dep.name); - } - } - } - - pub fn get_help(&self, command: &str) -> Option { - let kind = match command { - "build" => Kind::Build, - "doc" => Kind::Doc, - "test" => Kind::Test, - "bench" => Kind::Bench, - "dist" => Kind::Dist, - "install" => Kind::Install, - _ => return None, - }; - let rules = self.rules.values().filter(|r| r.kind == kind); - let rules = rules.filter(|r| !r.path.contains("nowhere")); - let mut rules = rules.collect::>(); - rules.sort_by_key(|r| r.path); - - let mut help_string = String::from("Available paths:\n"); - for rule in rules { - help_string.push_str(format!(" ./x.py {} {}\n", command, rule.path).as_str()); - } - Some(help_string) - } - - /// Construct the top-level build steps that we're going to be executing, - /// given the subcommand that our build is performing. - fn plan(&self) -> Vec> { - // Ok, the logic here is pretty subtle, and involves quite a few - // conditionals. The basic idea here is to: - // - // 1. First, filter all our rules to the relevant ones. This means that - // the command specified corresponds to one of our `Kind` variants, - // and we filter all rules based on that. - // - // 2. Next, we determine which rules we're actually executing. If a - // number of path filters were specified on the command line we look - // for those, otherwise we look for anything tagged `default`. - // Here we also compute the priority of each rule based on how early - // in the command line the matching path filter showed up. - // - // 3. Finally, we generate some steps with host and target information. - // - // The last step is by far the most complicated and subtle. The basic - // thinking here is that we want to take the cartesian product of - // specified hosts and targets and build rules with that. 
The list of - // hosts and targets, if not specified, come from the how this build was - // configured. If the rule we're looking at is a host-only rule the we - // ignore the list of targets and instead consider the list of hosts - // also the list of targets. - // - // Once the host and target lists are generated we take the cartesian - // product of the two and then create a step based off them. Note that - // the stage each step is associated was specified with the `--step` - // flag on the command line. - let (kind, paths) = match self.build.flags.cmd { - Subcommand::Build { ref paths } => (Kind::Build, &paths[..]), - Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]), - Subcommand::Test { ref paths, .. } => (Kind::Test, &paths[..]), - Subcommand::Bench { ref paths, .. } => (Kind::Bench, &paths[..]), - Subcommand::Dist { ref paths } => (Kind::Dist, &paths[..]), - Subcommand::Install { ref paths } => (Kind::Install, &paths[..]), - Subcommand::Clean => panic!(), - }; - - let mut rules: Vec<_> = self.rules.values().filter_map(|rule| { - if rule.kind != kind { - return None; - } - - if paths.len() == 0 && rule.default { - Some((rule, 0)) - } else { - paths.iter() - .position(|path| path.ends_with(rule.path)) - .map(|priority| (rule, priority)) - } - }).collect(); - - if rules.is_empty() && - !paths.get(0).unwrap_or(&PathBuf::new()) - .ends_with("nonexistent/path/to/trigger/cargo/metadata") { - println!("\nNothing to run...\n"); - process::exit(1); - } - - rules.sort_by_key(|&(_, priority)| priority); - - rules.into_iter().flat_map(|(rule, _)| { - let hosts = if rule.only_host_build || rule.only_build { - self.build.build_slice() - } else { - &self.build.hosts - }; - // Determine the actual targets participating in this rule. - // NOTE: We should keep the full projection from build triple to - // the hosts for the dist steps, now that the hosts array above is - // truncated to avoid duplication of work in that case. Therefore - // the original non-shadowed hosts array is used below. - let arr = if rule.host { - // If --target was specified but --host wasn't specified, - // don't run any host-only tests. - if self.build.flags.host.len() > 0 { - &self.build.hosts - } else if self.build.flags.target.len() > 0 { - &[] - } else if rule.only_build { - self.build.build_slice() - } else { - &self.build.hosts - } - } else { - &self.build.targets - }; - - hosts.iter().flat_map(move |host| { - arr.iter().map(move |target| { - self.sbuild.name(rule.name).target(target).host(host) - }) - }) - }).collect() - } - - /// Execute all top-level targets indicated by `steps`. - /// - /// This will take the list returned by `plan` and then execute each step - /// along with all required dependencies as it goes up the chain. - fn run(&self, steps: &[Step<'a>]) { - self.build.verbose("bootstrap top targets:"); - for step in steps.iter() { - self.build.verbose(&format!("\t{:?}", step)); - } - - // Using `steps` as the top-level targets, make a topological ordering - // of what we need to do. - let order = self.expand(steps); - - // Print out what we're doing for debugging - self.build.verbose("bootstrap build plan:"); - for step in order.iter() { - self.build.verbose(&format!("\t{:?}", step)); - } - - // And finally, iterate over everything and execute it. 
- for step in order.iter() { - if self.build.flags.keep_stage.map_or(false, |s| step.stage <= s) { - self.build.verbose(&format!("keeping step {:?}", step)); - continue; - } - self.build.verbose(&format!("executing step {:?}", step)); - (self.rules[step.name].run)(step); - } - - // Check for postponed failures from `test --no-fail-fast`. - let failures = self.build.delayed_failures.get(); - if failures > 0 { - println!("\n{} command(s) did not execute successfully.\n", failures); - process::exit(1); - } - } - - /// From the top level targets `steps` generate a topological ordering of - /// all steps needed to run those steps. - fn expand(&self, steps: &[Step<'a>]) -> Vec> { - // First up build a graph of steps and their dependencies. The `nodes` - // map is a map from step to a unique number. The `edges` map is a - // map from these unique numbers to a list of other numbers, - // representing dependencies. - let mut nodes = HashMap::new(); - nodes.insert(Step::noop(), 0); - let mut edges = HashMap::new(); - edges.insert(0, HashSet::new()); - for step in steps { - self.build_graph(step.clone(), &mut nodes, &mut edges); - } - - // Now that we've built up the actual dependency graph, draw more - // dependency edges to satisfy the `after` dependencies field for each - // rule. - self.satisfy_after_deps(&nodes, &mut edges); - - // And finally, perform a topological sort to return a list of steps to - // execute. - let mut order = Vec::new(); - let mut visited = HashSet::new(); - visited.insert(0); - let idx_to_node = nodes.iter().map(|p| (*p.1, p.0)).collect::>(); - for idx in 0..nodes.len() { - self.topo_sort(idx, &idx_to_node, &edges, &mut visited, &mut order); - } - order - } - - /// Builds the dependency graph rooted at `step`. - /// - /// The `nodes` and `edges` maps are filled out according to the rule - /// described by `step.name`. - fn build_graph(&self, - step: Step<'a>, - nodes: &mut HashMap, usize>, - edges: &mut HashMap>) -> usize { - use std::collections::hash_map::Entry; - - let idx = nodes.len(); - match nodes.entry(step.clone()) { - Entry::Vacant(e) => { e.insert(idx); } - Entry::Occupied(e) => return *e.get(), - } - - let mut deps = Vec::new(); - for dep in self.rules[step.name].deps.iter() { - let dep = dep(&step); - if dep.name.starts_with("default:") { - let kind = match &dep.name[8..] { - "doc" => Kind::Doc, - "dist" => Kind::Dist, - kind => panic!("unknown kind: `{}`", kind), - }; - let host = self.build.config.host.iter().any(|h| h == dep.target); - let rules = self.rules.values().filter(|r| r.default); - for rule in rules.filter(|r| r.kind == kind && (!r.host || host)) { - deps.push(self.build_graph(dep.name(rule.name), nodes, edges)); - } - } else { - deps.push(self.build_graph(dep, nodes, edges)); - } - } - - edges.entry(idx).or_insert(HashSet::new()).extend(deps); - idx - } - - /// Given a dependency graph with a finished list of `nodes`, fill out more - /// dependency `edges`. - /// - /// This is the step which satisfies all `after` listed dependencies in - /// `Rule` above. - fn satisfy_after_deps(&self, - nodes: &HashMap, usize>, - edges: &mut HashMap>) { - // Reverse map from the name of a step to the node indices that it - // appears at. - let mut name_to_idx = HashMap::new(); - for (step, &idx) in nodes { - name_to_idx.entry(step.name).or_insert(Vec::new()).push(idx); - } - - for (step, idx) in nodes { - if *step == Step::noop() { - continue - } - for after in self.rules[step.name].after.iter() { - // This is the critical piece of an `after` dependency. 
If the - // dependency isn't actually in our graph then no edge is drawn, - // only if it's already present do we draw the edges. - if let Some(idxs) = name_to_idx.get(after) { - edges.get_mut(idx).unwrap() - .extend(idxs.iter().cloned()); - } - } - } - } - - fn topo_sort(&self, - cur: usize, - nodes: &HashMap>, - edges: &HashMap>, - visited: &mut HashSet, - order: &mut Vec>) { - if !visited.insert(cur) { - return - } - for dep in edges[&cur].iter() { - self.topo_sort(*dep, nodes, edges, visited, order); - } - order.push(nodes[&cur].clone()); - } -} - -#[cfg(test)] -mod tests { - use std::env; - - use Build; - use config::Config; - use flags::Flags; - - fn build(args: &[&str], - extra_host: &[&str], - extra_target: &[&str]) -> Build { - build_(args, extra_host, extra_target, true) - } - - fn build_(args: &[&str], - extra_host: &[&str], - extra_target: &[&str], - docs: bool) -> Build { - let mut args = args.iter().map(|s| s.to_string()).collect::>(); - args.push("--build".to_string()); - args.push("A".to_string()); - let flags = Flags::parse(&args); - - let mut config = Config::default(); - config.docs = docs; - config.build = "A".to_string(); - config.host = vec![config.build.clone()]; - config.host.extend(extra_host.iter().map(|s| s.to_string())); - config.target = config.host.clone(); - config.target.extend(extra_target.iter().map(|s| s.to_string())); - - let mut build = Build::new(flags, config); - let cwd = env::current_dir().unwrap(); - build.crates.insert("std".to_string(), ::Crate { - name: "std".to_string(), - deps: Vec::new(), - path: cwd.join("src/std"), - doc_step: "doc-crate-std".to_string(), - build_step: "build-crate-std".to_string(), - test_step: "test-crate-std".to_string(), - bench_step: "bench-crate-std".to_string(), - version: String::new(), - }); - build.crates.insert("test".to_string(), ::Crate { - name: "test".to_string(), - deps: Vec::new(), - path: cwd.join("src/test"), - doc_step: "doc-crate-test".to_string(), - build_step: "build-crate-test".to_string(), - test_step: "test-crate-test".to_string(), - bench_step: "bench-crate-test".to_string(), - version: String::new(), - }); - build.crates.insert("rustc-main".to_string(), ::Crate { - name: "rustc-main".to_string(), - deps: Vec::new(), - version: String::new(), - path: cwd.join("src/rustc-main"), - doc_step: "doc-crate-rustc-main".to_string(), - build_step: "build-crate-rustc-main".to_string(), - test_step: "test-crate-rustc-main".to_string(), - bench_step: "bench-crate-rustc-main".to_string(), - }); - return build - } - - #[test] - fn dist_baseline() { - let build = build(&["dist"], &[], &[]); - let rules = super::build_rules(&build); - let plan = rules.plan(); - println!("rules: {:#?}", plan); - assert!(plan.iter().all(|s| s.stage == 2)); - assert!(plan.iter().all(|s| s.host == "A" )); - assert!(plan.iter().all(|s| s.target == "A" )); - - let step = super::Step { - name: "", - stage: 2, - host: &build.build, - target: &build.build, - }; - - assert!(plan.contains(&step.name("dist-docs"))); - assert!(plan.contains(&step.name("dist-mingw"))); - assert!(plan.contains(&step.name("dist-rustc"))); - assert!(plan.contains(&step.name("dist-std"))); - assert!(plan.contains(&step.name("dist-src"))); - } - - #[test] - fn dist_with_targets() { - let build = build(&["dist"], &[], &["B"]); - let rules = super::build_rules(&build); - let plan = rules.plan(); - println!("rules: {:#?}", plan); - assert!(plan.iter().all(|s| s.stage == 2)); - assert!(plan.iter().all(|s| s.host == "A" )); - - let step = super::Step { - name: "", - 
stage: 2, - host: &build.build, - target: &build.build, - }; - - assert!(plan.contains(&step.name("dist-docs"))); - assert!(plan.contains(&step.name("dist-mingw"))); - assert!(plan.contains(&step.name("dist-rustc"))); - assert!(plan.contains(&step.name("dist-std"))); - assert!(plan.contains(&step.name("dist-src"))); - - assert!(plan.contains(&step.target("B").name("dist-docs"))); - assert!(plan.contains(&step.target("B").name("dist-mingw"))); - assert!(!plan.contains(&step.target("B").name("dist-rustc"))); - assert!(plan.contains(&step.target("B").name("dist-std"))); - assert!(!plan.contains(&step.target("B").name("dist-src"))); - } - - #[test] - fn dist_with_hosts() { - let build = build(&["dist"], &["B"], &[]); - let rules = super::build_rules(&build); - let plan = rules.plan(); - println!("rules: {:#?}", plan); - assert!(plan.iter().all(|s| s.stage == 2)); - - let step = super::Step { - name: "", - stage: 2, - host: &build.build, - target: &build.build, - }; - - assert!(!plan.iter().any(|s| s.host == "B")); - - assert!(plan.contains(&step.name("dist-docs"))); - assert!(plan.contains(&step.name("dist-mingw"))); - assert!(plan.contains(&step.name("dist-rustc"))); - assert!(plan.contains(&step.name("dist-std"))); - assert!(plan.contains(&step.name("dist-src"))); - - assert!(plan.contains(&step.target("B").name("dist-docs"))); - assert!(plan.contains(&step.target("B").name("dist-mingw"))); - assert!(plan.contains(&step.target("B").name("dist-rustc"))); - assert!(plan.contains(&step.target("B").name("dist-std"))); - assert!(!plan.contains(&step.target("B").name("dist-src"))); - } - - #[test] - fn dist_with_targets_and_hosts() { - let build = build(&["dist"], &["B"], &["C"]); - let rules = super::build_rules(&build); - let plan = rules.plan(); - println!("rules: {:#?}", plan); - assert!(plan.iter().all(|s| s.stage == 2)); - - let step = super::Step { - name: "", - stage: 2, - host: &build.build, - target: &build.build, - }; - - assert!(!plan.iter().any(|s| s.host == "B")); - assert!(!plan.iter().any(|s| s.host == "C")); - - assert!(plan.contains(&step.name("dist-docs"))); - assert!(plan.contains(&step.name("dist-mingw"))); - assert!(plan.contains(&step.name("dist-rustc"))); - assert!(plan.contains(&step.name("dist-std"))); - assert!(plan.contains(&step.name("dist-src"))); - - assert!(plan.contains(&step.target("B").name("dist-docs"))); - assert!(plan.contains(&step.target("B").name("dist-mingw"))); - assert!(plan.contains(&step.target("B").name("dist-rustc"))); - assert!(plan.contains(&step.target("B").name("dist-std"))); - assert!(!plan.contains(&step.target("B").name("dist-src"))); - - assert!(plan.contains(&step.target("C").name("dist-docs"))); - assert!(plan.contains(&step.target("C").name("dist-mingw"))); - assert!(!plan.contains(&step.target("C").name("dist-rustc"))); - assert!(plan.contains(&step.target("C").name("dist-std"))); - assert!(!plan.contains(&step.target("C").name("dist-src"))); - } - - #[test] - fn dist_target_with_target_flag() { - let build = build(&["dist", "--target=C"], &["B"], &["C"]); - let rules = super::build_rules(&build); - let plan = rules.plan(); - println!("rules: {:#?}", plan); - assert!(plan.iter().all(|s| s.stage == 2)); - - let step = super::Step { - name: "", - stage: 2, - host: &build.build, - target: &build.build, - }; - - assert!(!plan.iter().any(|s| s.target == "A")); - assert!(!plan.iter().any(|s| s.target == "B")); - assert!(!plan.iter().any(|s| s.host == "B")); - assert!(!plan.iter().any(|s| s.host == "C")); - - 
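The `expand`/`build_graph`/`topo_sort` machinery removed above is, at its core, a post-order depth-first search over an index-based graph: every dependency is pushed onto the output before the node that needs it. Here is a self-contained sketch of that traversal with plain `usize` ids standing in for interned steps; the function is a simplified illustration, not the removed API.

use std::collections::{HashMap, HashSet};

/// Post-order DFS: dependencies land in `order` before their dependents,
/// which is the property the removed `topo_sort` relied on.
fn topo_sort(
    cur: usize,
    edges: &HashMap<usize, HashSet<usize>>,
    visited: &mut HashSet<usize>,
    order: &mut Vec<usize>,
) {
    if !visited.insert(cur) {
        return;
    }
    if let Some(deps) = edges.get(&cur) {
        for &dep in deps {
            topo_sort(dep, edges, visited, order);
        }
    }
    order.push(cur);
}

fn main() {
    // Node 1 depends on 0, node 2 depends on 1, node 3 depends on 0.
    let mut edges: HashMap<usize, HashSet<usize>> = HashMap::new();
    edges.insert(0, HashSet::new());
    edges.insert(1, vec![0].into_iter().collect());
    edges.insert(2, vec![1].into_iter().collect());
    edges.insert(3, vec![0].into_iter().collect());

    let mut visited = HashSet::new();
    let mut order = Vec::new();
    for node in 0..edges.len() {
        topo_sort(node, &edges, &mut visited, &mut order);
    }

    let pos = |x: usize| order.iter().position(|&n| n == x).unwrap();
    // Dependencies always come before the nodes that need them.
    assert!(pos(0) < pos(1) && pos(1) < pos(2) && pos(0) < pos(3));
    println!("topological order: {:?}", order);
}

In the removed code the ids map back to `Step` values, and extra edges are drawn for `after` dependencies before the sort runs.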
assert!(plan.contains(&step.target("C").name("dist-docs"))); - assert!(plan.contains(&step.target("C").name("dist-mingw"))); - assert!(!plan.contains(&step.target("C").name("dist-rustc"))); - assert!(plan.contains(&step.target("C").name("dist-std"))); - assert!(!plan.contains(&step.target("C").name("dist-src"))); - } - - #[test] - fn dist_host_with_target_flag() { - let build = build(&["dist", "--host=B", "--target=B"], &["B"], &["C"]); - let rules = super::build_rules(&build); - let plan = rules.plan(); - println!("rules: {:#?}", plan); - assert!(plan.iter().all(|s| s.stage == 2)); - - let step = super::Step { - name: "", - stage: 2, - host: &build.build, - target: &build.build, - }; - - assert!(!plan.iter().any(|s| s.target == "A")); - assert!(!plan.iter().any(|s| s.target == "C")); - assert!(!plan.iter().any(|s| s.host == "B")); - assert!(!plan.iter().any(|s| s.host == "C")); - - assert!(plan.contains(&step.target("B").name("dist-docs"))); - assert!(plan.contains(&step.target("B").name("dist-mingw"))); - assert!(plan.contains(&step.target("B").name("dist-rustc"))); - assert!(plan.contains(&step.target("B").name("dist-std"))); - assert!(plan.contains(&step.target("B").name("dist-src"))); - - let all = rules.expand(&plan); - println!("all rules: {:#?}", all); - assert!(!all.contains(&step.name("rustc"))); - assert!(!all.contains(&step.name("build-crate-test").stage(1))); - - // all stage0 compiles should be for the build target, A - for step in all.iter().filter(|s| s.stage == 0) { - if !step.name.contains("build-crate") { - continue - } - println!("step: {:?}", step); - assert!(step.host != "B"); - assert!(step.target != "B"); - assert!(step.host != "C"); - assert!(step.target != "C"); - } - } - - #[test] - fn build_default() { - let build = build(&["build"], &["B"], &["C"]); - let rules = super::build_rules(&build); - let plan = rules.plan(); - println!("rules: {:#?}", plan); - assert!(plan.iter().all(|s| s.stage == 2)); - - let step = super::Step { - name: "", - stage: 2, - host: &build.build, - target: &build.build, - }; - - // rustc built for all for of (A, B) x (A, B) - assert!(plan.contains(&step.name("librustc"))); - assert!(plan.contains(&step.target("B").name("librustc"))); - assert!(plan.contains(&step.host("B").target("A").name("librustc"))); - assert!(plan.contains(&step.host("B").target("B").name("librustc"))); - - // rustc never built for C - assert!(!plan.iter().any(|s| { - s.name.contains("rustc") && (s.host == "C" || s.target == "C") - })); - - // test built for everything - assert!(plan.contains(&step.name("libtest"))); - assert!(plan.contains(&step.target("B").name("libtest"))); - assert!(plan.contains(&step.host("B").target("A").name("libtest"))); - assert!(plan.contains(&step.host("B").target("B").name("libtest"))); - assert!(plan.contains(&step.host("A").target("C").name("libtest"))); - assert!(plan.contains(&step.host("B").target("C").name("libtest"))); - - let all = rules.expand(&plan); - println!("all rules: {:#?}", all); - assert!(all.contains(&step.name("rustc"))); - assert!(all.contains(&step.name("libstd"))); - } - - #[test] - fn build_filtered() { - let build = build(&["build", "--target=C"], &["B"], &["C"]); - let rules = super::build_rules(&build); - let plan = rules.plan(); - println!("rules: {:#?}", plan); - assert!(plan.iter().all(|s| s.stage == 2)); - - assert!(!plan.iter().any(|s| s.name.contains("rustc"))); - assert!(plan.iter().all(|s| { - !s.name.contains("test") || s.target == "C" - })); - } - - #[test] - fn test_default() { - let build = 
build(&["test"], &[], &[]); - let rules = super::build_rules(&build); - let plan = rules.plan(); - println!("rules: {:#?}", plan); - assert!(plan.iter().all(|s| s.stage == 2)); - assert!(plan.iter().all(|s| s.host == "A")); - assert!(plan.iter().all(|s| s.target == "A")); - - assert!(plan.iter().any(|s| s.name.contains("-ui"))); - assert!(plan.iter().any(|s| s.name.contains("cfail"))); - assert!(plan.iter().any(|s| s.name.contains("cfail-full"))); - assert!(plan.iter().any(|s| s.name.contains("codegen-units"))); - assert!(plan.iter().any(|s| s.name.contains("debuginfo"))); - assert!(plan.iter().any(|s| s.name.contains("docs"))); - assert!(plan.iter().any(|s| s.name.contains("error-index"))); - assert!(plan.iter().any(|s| s.name.contains("incremental"))); - assert!(plan.iter().any(|s| s.name.contains("linkchecker"))); - assert!(plan.iter().any(|s| s.name.contains("mir-opt"))); - assert!(plan.iter().any(|s| s.name.contains("pfail"))); - assert!(plan.iter().any(|s| s.name.contains("rfail"))); - assert!(plan.iter().any(|s| s.name.contains("rfail-full"))); - assert!(plan.iter().any(|s| s.name.contains("rmake"))); - assert!(plan.iter().any(|s| s.name.contains("rpass"))); - assert!(plan.iter().any(|s| s.name.contains("rpass-full"))); - assert!(plan.iter().any(|s| s.name.contains("rustc-all"))); - assert!(plan.iter().any(|s| s.name.contains("rustdoc"))); - assert!(plan.iter().any(|s| s.name.contains("std-all"))); - assert!(plan.iter().any(|s| s.name.contains("test-all"))); - assert!(plan.iter().any(|s| s.name.contains("tidy"))); - assert!(plan.iter().any(|s| s.name.contains("valgrind"))); - } - - #[test] - fn test_with_a_target() { - let build = build(&["test", "--target=C"], &[], &["C"]); - let rules = super::build_rules(&build); - let plan = rules.plan(); - println!("rules: {:#?}", plan); - assert!(plan.iter().all(|s| s.stage == 2)); - assert!(plan.iter().all(|s| s.host == "A")); - assert!(plan.iter().all(|s| s.target == "C")); - - assert!(plan.iter().any(|s| s.name.contains("-ui"))); - assert!(!plan.iter().any(|s| s.name.contains("ui-full"))); - assert!(plan.iter().any(|s| s.name.contains("cfail"))); - assert!(!plan.iter().any(|s| s.name.contains("cfail-full"))); - assert!(plan.iter().any(|s| s.name.contains("codegen-units"))); - assert!(plan.iter().any(|s| s.name.contains("debuginfo"))); - assert!(!plan.iter().any(|s| s.name.contains("docs"))); - assert!(!plan.iter().any(|s| s.name.contains("error-index"))); - assert!(plan.iter().any(|s| s.name.contains("incremental"))); - assert!(!plan.iter().any(|s| s.name.contains("linkchecker"))); - assert!(plan.iter().any(|s| s.name.contains("mir-opt"))); - assert!(plan.iter().any(|s| s.name.contains("pfail"))); - assert!(plan.iter().any(|s| s.name.contains("rfail"))); - assert!(!plan.iter().any(|s| s.name.contains("rfail-full"))); - assert!(!plan.iter().any(|s| s.name.contains("rmake"))); - assert!(plan.iter().any(|s| s.name.contains("rpass"))); - assert!(!plan.iter().any(|s| s.name.contains("rpass-full"))); - assert!(!plan.iter().any(|s| s.name.contains("rustc-all"))); - assert!(!plan.iter().any(|s| s.name.contains("rustdoc"))); - assert!(plan.iter().any(|s| s.name.contains("std-all"))); - assert!(plan.iter().any(|s| s.name.contains("test-all"))); - assert!(!plan.iter().any(|s| s.name.contains("tidy"))); - assert!(plan.iter().any(|s| s.name.contains("valgrind"))); - } - - #[test] - fn test_disable_docs() { - let build = build_(&["test"], &[], &[], false); - let rules = super::build_rules(&build); - let plan = rules.plan(); - println!("rules: {:#?}", 
plan); - assert!(!plan.iter().any(|s| { - s.name.contains("doc-") || s.name.contains("default:doc") - })); - // none of the dependencies should be a doc rule either - assert!(!plan.iter().any(|s| { - rules.rules[s.name].deps.iter().any(|dep| { - let dep = dep(&rules.sbuild.name(s.name)); - dep.name.contains("doc-") || dep.name.contains("default:doc") - }) - })); - } -} diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs new file mode 100644 index 0000000000..e759f1a3e6 --- /dev/null +++ b/src/bootstrap/tool.rs @@ -0,0 +1,425 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::fs; +use std::env; +use std::path::PathBuf; +use std::process::Command; + +use Mode; +use Compiler; +use builder::{Step, RunConfig, ShouldRun, Builder}; +use util::{copy, exe, add_lib_path}; +use compile::{self, libtest_stamp, libstd_stamp, librustc_stamp}; +use native; +use channel::GitInfo; +use cache::Interned; + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct CleanTools { + pub compiler: Compiler, + pub target: Interned, + pub mode: Mode, +} + +impl Step for CleanTools { + type Output = (); + + fn should_run(run: ShouldRun) -> ShouldRun { + run.never() + } + + /// Build a tool in `src/tools` + /// + /// This will build the specified tool with the specified `host` compiler in + /// `stage` into the normal cargo output directory. + fn run(self, builder: &Builder) { + let build = builder.build; + let compiler = self.compiler; + let target = self.target; + let mode = self.mode; + + let stamp = match mode { + Mode::Libstd => libstd_stamp(build, compiler, target), + Mode::Libtest => libtest_stamp(build, compiler, target), + Mode::Librustc => librustc_stamp(build, compiler, target), + _ => panic!(), + }; + let out_dir = build.cargo_out(compiler, Mode::Tool, target); + build.clear_if_dirty(&out_dir, &stamp); + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +struct ToolBuild { + compiler: Compiler, + target: Interned, + tool: &'static str, + mode: Mode, +} + +impl Step for ToolBuild { + type Output = PathBuf; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.never() + } + + /// Build a tool in `src/tools` + /// + /// This will build the specified tool with the specified `host` compiler in + /// `stage` into the normal cargo output directory. 
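`ToolBuild::run` just below returns `build.cargo_out(..).join(exe(tool, &compiler.host))`; `exe` is imported from `util` at the top of this file and appends the platform's executable suffix to the tool name. A rough standalone stand-in that only special-cases Windows, to show the naming convention being relied on (not the actual helper, which may handle more cases):

/// Append the conventional executable suffix for `target`.
/// Simplified illustration of what a helper like rustbuild's `util::exe` does.
fn exe(name: &str, target: &str) -> String {
    if target.contains("windows") {
        format!("{}.exe", name)
    } else {
        name.to_string()
    }
}

fn main() {
    assert_eq!(exe("tidy", "x86_64-unknown-linux-gnu"), "tidy");
    assert_eq!(exe("tidy", "x86_64-pc-windows-msvc"), "tidy.exe");
    println!("ok");
}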
+ fn run(self, builder: &Builder) -> PathBuf { + let build = builder.build; + let compiler = self.compiler; + let target = self.target; + let tool = self.tool; + + match self.mode { + Mode::Libstd => builder.ensure(compile::Std { compiler, target }), + Mode::Libtest => builder.ensure(compile::Test { compiler, target }), + Mode::Librustc => builder.ensure(compile::Rustc { compiler, target }), + Mode::Tool => panic!("unexpected Mode::Tool for tool build") + } + + let _folder = build.fold_output(|| format!("stage{}-{}", compiler.stage, tool)); + println!("Building stage{} tool {} ({})", compiler.stage, tool, target); + + let mut cargo = prepare_tool_cargo(builder, compiler, target, tool); + build.run(&mut cargo); + build.cargo_out(compiler, Mode::Tool, target).join(exe(tool, &compiler.host)) + } +} + +fn prepare_tool_cargo( + builder: &Builder, + compiler: Compiler, + target: Interned, + tool: &'static str, +) -> Command { + let build = builder.build; + let mut cargo = builder.cargo(compiler, Mode::Tool, target, "build"); + let dir = build.src.join("src/tools").join(tool); + cargo.arg("--manifest-path").arg(dir.join("Cargo.toml")); + + // We don't want to build tools dynamically as they'll be running across + // stages and such and it's just easier if they're not dynamically linked. + cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); + + if let Some(dir) = build.openssl_install_dir(target) { + cargo.env("OPENSSL_STATIC", "1"); + cargo.env("OPENSSL_DIR", dir); + cargo.env("LIBZ_SYS_STATIC", "1"); + } + + cargo.env("CFG_RELEASE_CHANNEL", &build.config.channel); + + let info = GitInfo::new(&build.config, &dir); + if let Some(sha) = info.sha() { + cargo.env("CFG_COMMIT_HASH", sha); + } + if let Some(sha_short) = info.sha_short() { + cargo.env("CFG_SHORT_COMMIT_HASH", sha_short); + } + if let Some(date) = info.commit_date() { + cargo.env("CFG_COMMIT_DATE", date); + } + cargo +} + +macro_rules! 
tool { + ($($name:ident, $path:expr, $tool_name:expr, $mode:expr;)+) => { + #[derive(Copy, Clone)] + pub enum Tool { + $( + $name, + )+ + } + + impl<'a> Builder<'a> { + pub fn tool_exe(&self, tool: Tool) -> PathBuf { + match tool { + $(Tool::$name => + self.ensure($name { + compiler: self.compiler(0, self.build.build), + target: self.build.build, + }), + )+ + } + } + } + + $( + #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + pub struct $name { + pub compiler: Compiler, + pub target: Interned, + } + + impl Step for $name { + type Output = PathBuf; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path($path) + } + + fn make_run(run: RunConfig) { + run.builder.ensure($name { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build), + target: run.target, + }); + } + + fn run(self, builder: &Builder) -> PathBuf { + builder.ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: $tool_name, + mode: $mode, + }) + } + } + )+ + } +} + +tool!( + Rustbook, "src/tools/rustbook", "rustbook", Mode::Librustc; + ErrorIndex, "src/tools/error_index_generator", "error_index_generator", Mode::Librustc; + UnstableBookGen, "src/tools/unstable-book-gen", "unstable-book-gen", Mode::Libstd; + Tidy, "src/tools/tidy", "tidy", Mode::Libstd; + Linkchecker, "src/tools/linkchecker", "linkchecker", Mode::Libstd; + CargoTest, "src/tools/cargotest", "cargotest", Mode::Libstd; + Compiletest, "src/tools/compiletest", "compiletest", Mode::Libtest; + BuildManifest, "src/tools/build-manifest", "build-manifest", Mode::Libstd; + RemoteTestClient, "src/tools/remote-test-client", "remote-test-client", Mode::Libstd; + RustInstaller, "src/tools/rust-installer", "rust-installer", Mode::Libstd; +); + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct RemoteTestServer { + pub compiler: Compiler, + pub target: Interned, +} + +impl Step for RemoteTestServer { + type Output = PathBuf; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/tools/remote-test-server") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(RemoteTestServer { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build), + target: run.target, + }); + } + + fn run(self, builder: &Builder) -> PathBuf { + builder.ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: "remote-test-server", + mode: Mode::Libstd, + }) + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Rustdoc { + pub host: Interned, +} + +impl Step for Rustdoc { + type Output = PathBuf; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + run.path("src/tools/rustdoc") + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Rustdoc { + host: run.host, + }); + } + + fn run(self, builder: &Builder) -> PathBuf { + let build = builder.build; + let target_compiler = builder.compiler(builder.top_stage, self.host); + let target = target_compiler.host; + let build_compiler = if target_compiler.stage == 0 { + builder.compiler(0, builder.build.build) + } else if target_compiler.stage >= 2 { + // Past stage 2, we consider the compiler to be ABI-compatible and hence capable of + // building rustdoc itself. + builder.compiler(target_compiler.stage, builder.build.build) + } else { + // Similar to `compile::Assemble`, build with the previous stage's compiler. Otherwise + // we'd have stageN/bin/rustc and stageN/bin/rustdoc be effectively different stage + // compilers, which isn't what we want. 
+ builder.compiler(target_compiler.stage - 1, builder.build.build) + }; + + builder.ensure(compile::Rustc { compiler: build_compiler, target }); + + let _folder = build.fold_output(|| format!("stage{}-rustdoc", target_compiler.stage)); + println!("Building rustdoc for stage{} ({})", target_compiler.stage, target_compiler.host); + + let mut cargo = prepare_tool_cargo(builder, build_compiler, target, "rustdoc"); + build.run(&mut cargo); + // Cargo adds a number of paths to the dylib search path on windows, which results in + // the wrong rustdoc being executed. To avoid the conflicting rustdocs, we name the "tool" + // rustdoc a different name. + let tool_rustdoc = build.cargo_out(build_compiler, Mode::Tool, target) + .join(exe("rustdoc-tool-binary", &target_compiler.host)); + + // don't create a stage0-sysroot/bin directory. + if target_compiler.stage > 0 { + let sysroot = builder.sysroot(target_compiler); + let bindir = sysroot.join("bin"); + t!(fs::create_dir_all(&bindir)); + let bin_rustdoc = bindir.join(exe("rustdoc", &*target_compiler.host)); + let _ = fs::remove_file(&bin_rustdoc); + copy(&tool_rustdoc, &bin_rustdoc); + bin_rustdoc + } else { + tool_rustdoc + } + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Cargo { + pub compiler: Compiler, + pub target: Interned, +} + +impl Step for Cargo { + type Output = PathBuf; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.path("src/tools/cargo").default_condition(builder.build.config.extended) + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Cargo { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build), + target: run.target, + }); + } + + fn run(self, builder: &Builder) -> PathBuf { + builder.ensure(native::Openssl { + target: self.target, + }); + // Cargo depends on procedural macros, which requires a full host + // compiler to be available, so we need to depend on that. + builder.ensure(compile::Rustc { + compiler: self.compiler, + target: builder.build.build, + }); + builder.ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: "cargo", + mode: Mode::Librustc, + }) + } +} + +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub struct Rls { + pub compiler: Compiler, + pub target: Interned, +} + +impl Step for Rls { + type Output = PathBuf; + const DEFAULT: bool = true; + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun) -> ShouldRun { + let builder = run.builder; + run.path("src/tools/rls").default_condition(builder.build.config.extended) + } + + fn make_run(run: RunConfig) { + run.builder.ensure(Rls { + compiler: run.builder.compiler(run.builder.top_stage, run.builder.build.build), + target: run.target, + }); + } + + fn run(self, builder: &Builder) -> PathBuf { + builder.ensure(native::Openssl { + target: self.target, + }); + // RLS depends on procedural macros, which requires a full host + // compiler to be available, so we need to depend on that. + builder.ensure(compile::Rustc { + compiler: self.compiler, + target: builder.build.build, + }); + builder.ensure(ToolBuild { + compiler: self.compiler, + target: self.target, + tool: "rls", + mode: Mode::Librustc, + }) + } +} + +impl<'a> Builder<'a> { + /// Get a `Command` which is ready to run `tool` in `stage` built for + /// `host`. 
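`tool_cmd` and `prepare_tool_cmd`, which follow, make a tool runnable by pushing the sysroot library directory and the tool's `deps` output onto the dynamic-library search path before spawning it. A simplified, standalone sketch of that `add_lib_path` idea, with the platform detection reduced to the common cases (the real helper is imported from `util` and may differ in detail):

use std::env;
use std::ffi::OsString;
use std::path::PathBuf;
use std::process::Command;

/// Name of the dynamic-library search-path variable on the current platform.
/// Simplified stand-in; the real build code knows about more platforms.
fn dylib_path_var() -> &'static str {
    if cfg!(target_os = "windows") {
        "PATH"
    } else if cfg!(target_os = "macos") {
        "DYLD_LIBRARY_PATH"
    } else {
        "LD_LIBRARY_PATH"
    }
}

/// Prepend `paths` to the dynamic-library search path of `cmd`, keeping
/// whatever was already in the environment.
fn add_lib_path(paths: Vec<PathBuf>, cmd: &mut Command) {
    let var = dylib_path_var();
    let mut list = paths;
    if let Some(existing) = env::var_os(var) {
        list.extend(env::split_paths(&existing));
    }
    let joined: OsString = env::join_paths(&list).expect("invalid path separator in entry");
    cmd.env(var, joined);
}

fn main() {
    let mut cmd = Command::new("rustdoc");
    add_lib_path(vec![PathBuf::from("/tmp/sysroot/lib")], &mut cmd);
    println!("{:?}", cmd);
}

Prepending keeps freshly built libraries ahead of anything already installed on the system, which is the point of the path munging described in the doc comment above.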
+ pub fn tool_cmd(&self, tool: Tool) -> Command { + let mut cmd = Command::new(self.tool_exe(tool)); + let compiler = self.compiler(0, self.build.build); + self.prepare_tool_cmd(compiler, &mut cmd); + cmd + } + + /// Prepares the `cmd` provided to be able to run the `compiler` provided. + /// + /// Notably this munges the dynamic library lookup path to point to the + /// right location to run `compiler`. + fn prepare_tool_cmd(&self, compiler: Compiler, cmd: &mut Command) { + let host = &compiler.host; + let mut paths: Vec = vec![ + PathBuf::from(&self.sysroot_libdir(compiler, compiler.host)), + self.cargo_out(compiler, Mode::Tool, *host).join("deps"), + ]; + + // On MSVC a tool may invoke a C compiler (e.g. compiletest in run-make + // mode) and that C compiler may need some extra PATH modification. Do + // so here. + if compiler.host.contains("msvc") { + let curpaths = env::var_os("PATH").unwrap_or_default(); + let curpaths = env::split_paths(&curpaths).collect::>(); + for &(ref k, ref v) in self.cc[&compiler.host].0.env() { + if k != "PATH" { + continue + } + for path in env::split_paths(v) { + if !curpaths.contains(&path) { + paths.push(path); + } + } + } + } + add_lib_path(paths, cmd); + } +} diff --git a/src/build_helper/lib.rs b/src/build_helper/lib.rs index 7011261ab6..8b4c7f2ac3 100644 --- a/src/build_helper/lib.rs +++ b/src/build_helper/lib.rs @@ -13,7 +13,6 @@ extern crate filetime; use std::fs::File; -use std::io; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::{fs, env}; @@ -211,7 +210,7 @@ pub fn native_lib_boilerplate(src_name: &str, let out_dir = env::var_os("RUSTBUILD_NATIVE_DIR").unwrap_or(env::var_os("OUT_DIR").unwrap()); let out_dir = PathBuf::from(out_dir).join(out_name); - t!(create_dir_racy(&out_dir)); + t!(fs::create_dir_all(&out_dir)); if link_name.contains('=') { println!("cargo:rustc-link-lib={}", link_name); } else { @@ -260,21 +259,3 @@ fn fail(s: &str) -> ! { println!("\n\n{}\n\n", s); std::process::exit(1); } - -fn create_dir_racy(path: &Path) -> io::Result<()> { - match fs::create_dir(path) { - Ok(()) => return Ok(()), - Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => return Ok(()), - Err(ref e) if e.kind() == io::ErrorKind::NotFound => {} - Err(e) => return Err(e), - } - match path.parent() { - Some(p) => try!(create_dir_racy(p)), - None => return Err(io::Error::new(io::ErrorKind::Other, "failed to create whole tree")), - } - match fs::create_dir(path) { - Ok(()) => Ok(()), - Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => Ok(()), - Err(e) => Err(e), - } -} diff --git a/src/ci/docker/arm-android/Dockerfile b/src/ci/docker/arm-android/Dockerfile index 6cdaf6acfe..49d07d28d3 100644 --- a/src/ci/docker/arm-android/Dockerfile +++ b/src/ci/docker/arm-android/Dockerfile @@ -3,9 +3,6 @@ FROM ubuntu:16.04 COPY scripts/android-base-apt-get.sh /scripts/ RUN sh /scripts/android-base-apt-get.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/android-ndk.sh /scripts/ RUN . 
/scripts/android-ndk.sh && \ download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip arm 9 @@ -38,4 +35,4 @@ COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh COPY scripts/android-start-emulator.sh /scripts/ -ENTRYPOINT ["/usr/bin/dumb-init", "--", "/scripts/android-start-emulator.sh"] +ENTRYPOINT ["/scripts/android-start-emulator.sh"] diff --git a/src/ci/docker/armhf-gnu/Dockerfile b/src/ci/docker/armhf-gnu/Dockerfile index d289a93c35..191f8e3a28 100644 --- a/src/ci/docker/armhf-gnu/Dockerfile +++ b/src/ci/docker/armhf-gnu/Dockerfile @@ -63,24 +63,19 @@ RUN curl http://cdimage.ubuntu.com/ubuntu-base/releases/16.04/release/ubuntu-bas # Copy over our init script, which starts up our test server and also a few # other misc tasks. -COPY armhf-gnu/rcS rootfs/etc/init.d/rcS +COPY scripts/qemu-bare-bones-rcS rootfs/etc/init.d/rcS RUN chmod +x rootfs/etc/init.d/rcS # Helper to quickly fill the entropy pool in the kernel. -COPY armhf-gnu/addentropy.c /tmp/ +COPY scripts/qemu-bare-bones-addentropy.c /tmp/addentropy.c RUN arm-linux-gnueabihf-gcc addentropy.c -o rootfs/addentropy -static # TODO: What is this?! RUN curl -O http://ftp.nl.debian.org/debian/dists/jessie/main/installer-armhf/current/images/device-tree/vexpress-v2p-ca15-tc1.dtb -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - ENV RUST_CONFIGURE_ARGS \ --target=arm-unknown-linux-gnueabihf \ --qemu-armhf-rootfs=/tmp/rootfs diff --git a/src/ci/docker/asmjs/Dockerfile b/src/ci/docker/asmjs/Dockerfile index 1c39e8523d..28caf1fb57 100644 --- a/src/ci/docker/asmjs/Dockerfile +++ b/src/ci/docker/asmjs/Dockerfile @@ -13,9 +13,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ gdb \ xz-utils -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/emscripten.sh /scripts/ RUN bash /scripts/emscripten.sh @@ -35,5 +32,3 @@ ENV SCRIPT python2.7 ../x.py test --target $TARGETS COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/src/ci/docker/cross/Dockerfile b/src/ci/docker/cross/Dockerfile index 70dfbb53da..a83bbe9c60 100644 --- a/src/ci/docker/cross/Dockerfile +++ b/src/ci/docker/cross/Dockerfile @@ -21,9 +21,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ libssl-dev \ pkg-config -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - WORKDIR /tmp COPY cross/build-rumprun.sh /tmp/ @@ -38,6 +35,9 @@ RUN ./install-mips-musl.sh COPY cross/install-mipsel-musl.sh /tmp/ RUN ./install-mipsel-musl.sh +COPY cross/install-x86_64-redox.sh /tmp/ +RUN ./install-x86_64-redox.sh + ENV TARGETS=asmjs-unknown-emscripten ENV TARGETS=$TARGETS,wasm32-unknown-emscripten ENV TARGETS=$TARGETS,x86_64-rumprun-netbsd @@ -47,10 +47,12 @@ ENV TARGETS=$TARGETS,arm-unknown-linux-musleabi ENV TARGETS=$TARGETS,arm-unknown-linux-musleabihf ENV TARGETS=$TARGETS,armv7-unknown-linux-musleabihf ENV TARGETS=$TARGETS,sparc64-unknown-linux-gnu +ENV TARGETS=$TARGETS,x86_64-unknown-redox ENV CC_mipsel_unknown_linux_musl=mipsel-openwrt-linux-gcc \ CC_mips_unknown_linux_musl=mips-openwrt-linux-gcc \ - CC_sparc64_unknown_linux_gnu=sparc64-linux-gnu-gcc + CC_sparc64_unknown_linux_gnu=sparc64-linux-gnu-gcc \ + CC_x86_64_unknown_redox=x86_64-unknown-redox-gcc # Suppress some warnings in the openwrt toolchains we downloaded ENV STAGING_DIR=/tmp @@ -66,5 +68,3 @@ ENV SCRIPT python2.7 ../x.py dist --target $TARGETS # 
sccache COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/src/ci/docker/cross/install-mips-musl.sh b/src/ci/docker/cross/install-mips-musl.sh index 416bb75155..eeb4aacbbb 100755 --- a/src/ci/docker/cross/install-mips-musl.sh +++ b/src/ci/docker/cross/install-mips-musl.sh @@ -15,7 +15,7 @@ mkdir /usr/local/mips-linux-musl # originally from # https://downloads.openwrt.org/snapshots/trunk/ar71xx/generic/ # OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2 -URL="https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror" +URL="https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror" FILE="OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2" curl -L "$URL/$FILE" | tar xjf - -C /usr/local/mips-linux-musl --strip-components=2 diff --git a/src/ci/docker/cross/install-mipsel-musl.sh b/src/ci/docker/cross/install-mipsel-musl.sh index 9744b242fb..74b6a10e77 100755 --- a/src/ci/docker/cross/install-mipsel-musl.sh +++ b/src/ci/docker/cross/install-mipsel-musl.sh @@ -15,7 +15,7 @@ mkdir /usr/local/mipsel-linux-musl # Note that this originally came from: # https://downloads.openwrt.org/snapshots/trunk/malta/generic/ # OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2 -URL="https://s3.amazonaws.com/rust-lang-ci/libc" +URL="https://s3-us-west-1.amazonaws.com/rust-lang-ci2/libc" FILE="OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2" curl -L "$URL/$FILE" | tar xjf - -C /usr/local/mipsel-linux-musl --strip-components=2 diff --git a/src/ci/docker/cross/install-x86_64-redox.sh b/src/ci/docker/cross/install-x86_64-redox.sh new file mode 100755 index 0000000000..8e052c4acd --- /dev/null +++ b/src/ci/docker/cross/install-x86_64-redox.sh @@ -0,0 +1,23 @@ +#!/bin/bash +# Copyright 2017 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 or the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. + +# ignore-tidy-linelength + +set -ex + +apt-get update +apt-get install -y --no-install-recommends software-properties-common apt-transport-https + +apt-key adv --batch --yes --keyserver keyserver.ubuntu.com --recv-keys AA12E97F0881517F +add-apt-repository -y 'deb https://static.redox-os.org/toolchain/apt /' + +apt-get update +apt-get install -y x86-64-unknown-redox-gcc diff --git a/src/ci/docker/disabled/aarch64-gnu/Dockerfile b/src/ci/docker/disabled/aarch64-gnu/Dockerfile new file mode 100644 index 0000000000..9a0e453122 --- /dev/null +++ b/src/ci/docker/disabled/aarch64-gnu/Dockerfile @@ -0,0 +1,80 @@ +FROM ubuntu:16.04 + +RUN apt-get update -y && apt-get install -y --no-install-recommends \ + bc \ + bzip2 \ + ca-certificates \ + cmake \ + cpio \ + curl \ + file \ + g++ \ + gcc-aarch64-linux-gnu \ + git \ + libc6-dev \ + libc6-dev-arm64-cross \ + make \ + python2.7 \ + qemu-system-aarch64 \ + xz-utils + +ENV ARCH=arm64 \ + CROSS_COMPILE=aarch64-linux-gnu- + +WORKDIR /build + +# Compile the kernel that we're going to run and be emulating with. This is +# basically just done to be compatible with the QEMU target that we're going +# to be using when running tests. If any other kernel works or if any +# other QEMU target works with some other stock kernel, we can use that too! 
+# +# The `config` config file was a previously generated config file for +# the kernel. This file was generated by running `make defconfig` +# followed by `make menuconfig` and then enabling the IPv6 protocol page. +COPY disabled/aarch64-gnu/config /build/.config +RUN curl https://cdn.kernel.org/pub/linux/kernel/v4.x/linux-4.4.42.tar.xz | \ + tar xJf - && \ + cd /build/linux-4.4.42 && \ + cp /build/.config . && \ + make -j$(nproc) all && \ + cp arch/arm64/boot/Image /tmp && \ + cd /build && \ + rm -rf linux-4.4.42 + +# Compile an instance of busybox as this provides a lightweight system and init +# binary which we will boot into. Only trick here is configuring busybox to +# build static binaries. +RUN curl https://www.busybox.net/downloads/busybox-1.21.1.tar.bz2 | tar xjf - && \ + cd busybox-1.21.1 && \ + make defconfig && \ + sed -i 's/.*CONFIG_STATIC.*/CONFIG_STATIC=y/' .config && \ + make -j$(nproc) && \ + make install && \ + mv _install /tmp/rootfs && \ + cd /build && \ + rm -rf busybox-1.12.1 + +# Download the ubuntu rootfs, which we'll use as a chroot for all our tests. +WORKDIR /tmp +RUN mkdir rootfs/ubuntu +RUN curl http://cdimage.ubuntu.com/ubuntu-base/releases/16.04/release/ubuntu-base-16.04-core-arm64.tar.gz | \ + tar xzf - -C rootfs/ubuntu && \ + cd rootfs && mkdir proc sys dev etc etc/init.d + +# Copy over our init script, which starts up our test server and also a few +# other misc tasks. +COPY scripts/qemu-bare-bones-rcS rootfs/etc/init.d/rcS +RUN chmod +x rootfs/etc/init.d/rcS + +# Helper to quickly fill the entropy pool in the kernel. +COPY scripts/qemu-bare-bones-addentropy.c /tmp/addentropy.c +RUN aarch64-linux-gnu-gcc addentropy.c -o rootfs/addentropy -static + +COPY scripts/sccache.sh /scripts/ +RUN sh /scripts/sccache.sh + +ENV RUST_CONFIGURE_ARGS \ + --target=aarch64-unknown-linux-gnu \ + --qemu-aarch64-rootfs=/tmp/rootfs +ENV SCRIPT python2.7 ../x.py test --target aarch64-unknown-linux-gnu +ENV NO_CHANGE_USER=1 diff --git a/src/ci/docker/disabled/aarch64-gnu/config b/src/ci/docker/disabled/aarch64-gnu/config new file mode 100644 index 0000000000..a179253239 --- /dev/null +++ b/src/ci/docker/disabled/aarch64-gnu/config @@ -0,0 +1,3100 @@ +# +# Automatically generated file; DO NOT EDIT. 
+# Linux/arm64 4.4.42 Kernel Configuration +# +CONFIG_ARM64=y +CONFIG_64BIT=y +CONFIG_ARCH_PHYS_ADDR_T_64BIT=y +CONFIG_MMU=y +CONFIG_STACKTRACE_SUPPORT=y +CONFIG_ILLEGAL_POINTER_VALUE=0xdead000000000000 +CONFIG_LOCKDEP_SUPPORT=y +CONFIG_TRACE_IRQFLAGS_SUPPORT=y +CONFIG_RWSEM_XCHGADD_ALGORITHM=y +CONFIG_GENERIC_BUG=y +CONFIG_GENERIC_BUG_RELATIVE_POINTERS=y +CONFIG_GENERIC_HWEIGHT=y +CONFIG_GENERIC_CSUM=y +CONFIG_GENERIC_CALIBRATE_DELAY=y +CONFIG_ZONE_DMA=y +CONFIG_HAVE_GENERIC_RCU_GUP=y +CONFIG_ARCH_DMA_ADDR_T_64BIT=y +CONFIG_NEED_DMA_MAP_STATE=y +CONFIG_NEED_SG_DMA_LENGTH=y +CONFIG_SMP=y +CONFIG_SWIOTLB=y +CONFIG_IOMMU_HELPER=y +CONFIG_KERNEL_MODE_NEON=y +CONFIG_FIX_EARLYCON_MEM=y +CONFIG_PGTABLE_LEVELS=3 +CONFIG_DEFCONFIG_LIST="/lib/modules/$UNAME_RELEASE/.config" +CONFIG_IRQ_WORK=y +CONFIG_BUILDTIME_EXTABLE_SORT=y + +# +# General setup +# +CONFIG_INIT_ENV_ARG_LIMIT=32 +CONFIG_CROSS_COMPILE="" +# CONFIG_COMPILE_TEST is not set +CONFIG_LOCALVERSION="" +# CONFIG_LOCALVERSION_AUTO is not set +CONFIG_DEFAULT_HOSTNAME="(none)" +CONFIG_SWAP=y +CONFIG_SYSVIPC=y +CONFIG_SYSVIPC_SYSCTL=y +CONFIG_POSIX_MQUEUE=y +CONFIG_POSIX_MQUEUE_SYSCTL=y +CONFIG_CROSS_MEMORY_ATTACH=y +CONFIG_FHANDLE=y +CONFIG_USELIB=y +CONFIG_AUDIT=y +CONFIG_HAVE_ARCH_AUDITSYSCALL=y +# CONFIG_AUDITSYSCALL is not set + +# +# IRQ subsystem +# +CONFIG_GENERIC_IRQ_PROBE=y +CONFIG_GENERIC_IRQ_SHOW=y +CONFIG_GENERIC_IRQ_SHOW_LEVEL=y +CONFIG_GENERIC_IRQ_MIGRATION=y +CONFIG_HARDIRQS_SW_RESEND=y +CONFIG_GENERIC_IRQ_CHIP=y +CONFIG_IRQ_DOMAIN=y +CONFIG_IRQ_DOMAIN_HIERARCHY=y +CONFIG_GENERIC_MSI_IRQ=y +CONFIG_GENERIC_MSI_IRQ_DOMAIN=y +CONFIG_HANDLE_DOMAIN_IRQ=y +# CONFIG_IRQ_DOMAIN_DEBUG is not set +CONFIG_IRQ_FORCED_THREADING=y +CONFIG_SPARSE_IRQ=y +CONFIG_GENERIC_TIME_VSYSCALL=y +CONFIG_GENERIC_CLOCKEVENTS=y +CONFIG_ARCH_HAS_TICK_BROADCAST=y +CONFIG_GENERIC_CLOCKEVENTS_BROADCAST=y + +# +# Timers subsystem +# +CONFIG_TICK_ONESHOT=y +CONFIG_NO_HZ_COMMON=y +# CONFIG_HZ_PERIODIC is not set +CONFIG_NO_HZ_IDLE=y +# CONFIG_NO_HZ_FULL is not set +# CONFIG_NO_HZ is not set +CONFIG_HIGH_RES_TIMERS=y + +# +# CPU/Task time and stats accounting +# +CONFIG_TICK_CPU_ACCOUNTING=y +# CONFIG_VIRT_CPU_ACCOUNTING_GEN is not set +CONFIG_BSD_PROCESS_ACCT=y +CONFIG_BSD_PROCESS_ACCT_V3=y +CONFIG_TASKSTATS=y +CONFIG_TASK_DELAY_ACCT=y +CONFIG_TASK_XACCT=y +CONFIG_TASK_IO_ACCOUNTING=y + +# +# RCU Subsystem +# +CONFIG_PREEMPT_RCU=y +# CONFIG_RCU_EXPERT is not set +CONFIG_SRCU=y +# CONFIG_TASKS_RCU is not set +CONFIG_RCU_STALL_COMMON=y +# CONFIG_TREE_RCU_TRACE is not set +# CONFIG_RCU_EXPEDITE_BOOT is not set +CONFIG_BUILD_BIN2C=y +CONFIG_IKCONFIG=y +CONFIG_IKCONFIG_PROC=y +CONFIG_LOG_BUF_SHIFT=14 +CONFIG_LOG_CPU_MAX_BUF_SHIFT=12 +CONFIG_GENERIC_SCHED_CLOCK=y +CONFIG_CGROUPS=y +# CONFIG_CGROUP_DEBUG is not set +# CONFIG_CGROUP_FREEZER is not set +# CONFIG_CGROUP_PIDS is not set +# CONFIG_CGROUP_DEVICE is not set +# CONFIG_CPUSETS is not set +# CONFIG_CGROUP_CPUACCT is not set +CONFIG_PAGE_COUNTER=y +CONFIG_MEMCG=y +CONFIG_MEMCG_SWAP=y +CONFIG_MEMCG_SWAP_ENABLED=y +CONFIG_MEMCG_KMEM=y +CONFIG_CGROUP_HUGETLB=y +# CONFIG_CGROUP_PERF is not set +CONFIG_CGROUP_SCHED=y +CONFIG_FAIR_GROUP_SCHED=y +# CONFIG_CFS_BANDWIDTH is not set +# CONFIG_RT_GROUP_SCHED is not set +# CONFIG_BLK_CGROUP is not set +# CONFIG_CHECKPOINT_RESTORE is not set +CONFIG_NAMESPACES=y +# CONFIG_UTS_NS is not set +# CONFIG_IPC_NS is not set +# CONFIG_USER_NS is not set +CONFIG_PID_NS=y +# CONFIG_NET_NS is not set +CONFIG_SCHED_AUTOGROUP=y +# CONFIG_SYSFS_DEPRECATED is not set +# CONFIG_RELAY is not set 
+CONFIG_BLK_DEV_INITRD=y +CONFIG_INITRAMFS_SOURCE="" +CONFIG_RD_GZIP=y +CONFIG_RD_BZIP2=y +CONFIG_RD_LZMA=y +CONFIG_RD_XZ=y +CONFIG_RD_LZO=y +CONFIG_RD_LZ4=y +# CONFIG_CC_OPTIMIZE_FOR_SIZE is not set +CONFIG_SYSCTL=y +CONFIG_ANON_INODES=y +CONFIG_HAVE_UID16=y +CONFIG_SYSCTL_EXCEPTION_TRACE=y +CONFIG_BPF=y +# CONFIG_EXPERT is not set +CONFIG_UID16=y +CONFIG_MULTIUSER=y +# CONFIG_SGETMASK_SYSCALL is not set +CONFIG_SYSFS_SYSCALL=y +# CONFIG_SYSCTL_SYSCALL is not set +CONFIG_KALLSYMS=y +CONFIG_KALLSYMS_ALL=y +CONFIG_PRINTK=y +CONFIG_BUG=y +CONFIG_ELF_CORE=y +CONFIG_BASE_FULL=y +CONFIG_FUTEX=y +CONFIG_EPOLL=y +CONFIG_SIGNALFD=y +CONFIG_TIMERFD=y +CONFIG_EVENTFD=y +# CONFIG_BPF_SYSCALL is not set +CONFIG_SHMEM=y +CONFIG_AIO=y +CONFIG_ADVISE_SYSCALLS=y +# CONFIG_USERFAULTFD is not set +CONFIG_PCI_QUIRKS=y +CONFIG_MEMBARRIER=y +# CONFIG_EMBEDDED is not set +CONFIG_HAVE_PERF_EVENTS=y +CONFIG_PERF_USE_VMALLOC=y + +# +# Kernel Performance Events And Counters +# +CONFIG_PERF_EVENTS=y +# CONFIG_DEBUG_PERF_USE_VMALLOC is not set +CONFIG_VM_EVENT_COUNTERS=y +CONFIG_SLUB_DEBUG=y +# CONFIG_COMPAT_BRK is not set +# CONFIG_SLAB is not set +CONFIG_SLUB=y +CONFIG_SLUB_CPU_PARTIAL=y +# CONFIG_SYSTEM_DATA_VERIFICATION is not set +CONFIG_PROFILING=y +CONFIG_JUMP_LABEL=y +# CONFIG_STATIC_KEYS_SELFTEST is not set +# CONFIG_UPROBES is not set +# CONFIG_HAVE_64BIT_ALIGNED_ACCESS is not set +CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS=y +CONFIG_HAVE_ARCH_TRACEHOOK=y +CONFIG_HAVE_DMA_ATTRS=y +CONFIG_HAVE_DMA_CONTIGUOUS=y +CONFIG_GENERIC_SMP_IDLE_THREAD=y +CONFIG_GENERIC_IDLE_POLL_SETUP=y +CONFIG_HAVE_CLK=y +CONFIG_HAVE_DMA_API_DEBUG=y +CONFIG_HAVE_HW_BREAKPOINT=y +CONFIG_HAVE_PERF_REGS=y +CONFIG_HAVE_PERF_USER_STACK_DUMP=y +CONFIG_HAVE_ARCH_JUMP_LABEL=y +CONFIG_HAVE_RCU_TABLE_FREE=y +CONFIG_HAVE_ALIGNED_STRUCT_PAGE=y +CONFIG_HAVE_CMPXCHG_LOCAL=y +CONFIG_HAVE_CMPXCHG_DOUBLE=y +CONFIG_ARCH_WANT_COMPAT_IPC_PARSE_VERSION=y +CONFIG_HAVE_ARCH_SECCOMP_FILTER=y +CONFIG_HAVE_CC_STACKPROTECTOR=y +# CONFIG_CC_STACKPROTECTOR is not set +CONFIG_CC_STACKPROTECTOR_NONE=y +# CONFIG_CC_STACKPROTECTOR_REGULAR is not set +# CONFIG_CC_STACKPROTECTOR_STRONG is not set +CONFIG_HAVE_CONTEXT_TRACKING=y +CONFIG_HAVE_VIRT_CPU_ACCOUNTING_GEN=y +CONFIG_HAVE_ARCH_TRANSPARENT_HUGEPAGE=y +CONFIG_MODULES_USE_ELF_RELA=y +CONFIG_ARCH_HAS_ELF_RANDOMIZE=y +CONFIG_CLONE_BACKWARDS=y +CONFIG_OLD_SIGSUSPEND3=y +CONFIG_COMPAT_OLD_SIGACTION=y + +# +# GCOV-based kernel profiling +# +# CONFIG_GCOV_KERNEL is not set +CONFIG_ARCH_HAS_GCOV_PROFILE_ALL=y +CONFIG_HAVE_GENERIC_DMA_COHERENT=y +CONFIG_SLABINFO=y +CONFIG_RT_MUTEXES=y +CONFIG_BASE_SMALL=0 +CONFIG_MODULES=y +# CONFIG_MODULE_FORCE_LOAD is not set +CONFIG_MODULE_UNLOAD=y +# CONFIG_MODULE_FORCE_UNLOAD is not set +# CONFIG_MODVERSIONS is not set +# CONFIG_MODULE_SRCVERSION_ALL is not set +# CONFIG_MODULE_SIG is not set +# CONFIG_MODULE_COMPRESS is not set +CONFIG_MODULES_TREE_LOOKUP=y +CONFIG_BLOCK=y +# CONFIG_BLK_DEV_BSG is not set +# CONFIG_BLK_DEV_BSGLIB is not set +# CONFIG_BLK_DEV_INTEGRITY is not set +# CONFIG_BLK_CMDLINE_PARSER is not set + +# +# Partition Types +# +# CONFIG_PARTITION_ADVANCED is not set +CONFIG_MSDOS_PARTITION=y +CONFIG_EFI_PARTITION=y +CONFIG_BLOCK_COMPAT=y + +# +# IO Schedulers +# +CONFIG_IOSCHED_NOOP=y +# CONFIG_IOSCHED_DEADLINE is not set +CONFIG_IOSCHED_CFQ=y +CONFIG_DEFAULT_CFQ=y +# CONFIG_DEFAULT_NOOP is not set +CONFIG_DEFAULT_IOSCHED="cfq" +CONFIG_PREEMPT_NOTIFIERS=y +CONFIG_UNINLINE_SPIN_UNLOCK=y +CONFIG_ARCH_SUPPORTS_ATOMIC_RMW=y +CONFIG_MUTEX_SPIN_ON_OWNER=y 
+CONFIG_RWSEM_SPIN_ON_OWNER=y +CONFIG_LOCK_SPIN_ON_OWNER=y +CONFIG_FREEZER=y + +# +# Platform selection +# +CONFIG_ARCH_BCM_IPROC=y +CONFIG_ARCH_BERLIN=y +CONFIG_ARCH_EXYNOS=y +CONFIG_ARCH_EXYNOS7=y +CONFIG_ARCH_LAYERSCAPE=y +CONFIG_ARCH_HISI=y +CONFIG_ARCH_MEDIATEK=y +CONFIG_ARCH_QCOM=y +CONFIG_ARCH_ROCKCHIP=y +CONFIG_ARCH_SEATTLE=y +CONFIG_ARCH_STRATIX10=y +CONFIG_ARCH_TEGRA=y +CONFIG_ARCH_TEGRA_132_SOC=y +CONFIG_ARCH_SPRD=y +CONFIG_ARCH_THUNDER=y +CONFIG_ARCH_VEXPRESS=y +CONFIG_ARCH_XGENE=y +CONFIG_ARCH_ZYNQMP=y + +# +# Bus support +# +CONFIG_PCI=y +CONFIG_PCI_DOMAINS=y +CONFIG_PCI_DOMAINS_GENERIC=y +CONFIG_PCI_SYSCALL=y +CONFIG_PCI_BUS_ADDR_T_64BIT=y +CONFIG_PCI_MSI=y +CONFIG_PCI_MSI_IRQ_DOMAIN=y +# CONFIG_PCI_DEBUG is not set +# CONFIG_PCI_REALLOC_ENABLE_AUTO is not set +# CONFIG_PCI_STUB is not set +# CONFIG_PCI_IOV is not set +# CONFIG_PCI_PRI is not set +# CONFIG_PCI_PASID is not set +CONFIG_PCI_LABEL=y + +# +# PCI host controller drivers +# +CONFIG_PCI_HOST_GENERIC=y +CONFIG_PCI_XGENE=y +CONFIG_PCI_XGENE_MSI=y +# CONFIG_PCI_LAYERSCAPE is not set +CONFIG_PCIE_IPROC=y +CONFIG_PCIE_IPROC_PLATFORM=y +# CONFIG_PCI_HISI is not set +CONFIG_PCIEPORTBUS=y +CONFIG_PCIEAER=y +# CONFIG_PCIE_ECRC is not set +# CONFIG_PCIEAER_INJECT is not set +CONFIG_PCIEASPM=y +# CONFIG_PCIEASPM_DEBUG is not set +CONFIG_PCIEASPM_DEFAULT=y +# CONFIG_PCIEASPM_POWERSAVE is not set +# CONFIG_PCIEASPM_PERFORMANCE is not set +CONFIG_PCIE_PME=y +# CONFIG_HOTPLUG_PCI is not set + +# +# Kernel Features +# + +# +# ARM errata workarounds via the alternatives framework +# +CONFIG_ARM64_ERRATUM_826319=y +CONFIG_ARM64_ERRATUM_827319=y +CONFIG_ARM64_ERRATUM_824069=y +CONFIG_ARM64_ERRATUM_819472=y +CONFIG_ARM64_ERRATUM_832075=y +CONFIG_ARM64_ERRATUM_834220=y +CONFIG_ARM64_ERRATUM_845719=y +CONFIG_ARM64_ERRATUM_843419=y +CONFIG_CAVIUM_ERRATUM_22375=y +CONFIG_CAVIUM_ERRATUM_23154=y +CONFIG_CAVIUM_ERRATUM_27456=y +CONFIG_ARM64_4K_PAGES=y +# CONFIG_ARM64_16K_PAGES is not set +# CONFIG_ARM64_64K_PAGES is not set +CONFIG_ARM64_VA_BITS_39=y +# CONFIG_ARM64_VA_BITS_48 is not set +CONFIG_ARM64_VA_BITS=39 +# CONFIG_CPU_BIG_ENDIAN is not set +CONFIG_SCHED_MC=y +# CONFIG_SCHED_SMT is not set +CONFIG_NR_CPUS=64 +CONFIG_HOTPLUG_CPU=y +# CONFIG_PREEMPT_NONE is not set +# CONFIG_PREEMPT_VOLUNTARY is not set +CONFIG_PREEMPT=y +CONFIG_PREEMPT_COUNT=y +# CONFIG_HZ_100 is not set +CONFIG_HZ_250=y +# CONFIG_HZ_300 is not set +# CONFIG_HZ_1000 is not set +CONFIG_HZ=250 +CONFIG_SCHED_HRTICK=y +CONFIG_ARCH_HAS_HOLES_MEMORYMODEL=y +CONFIG_ARCH_SPARSEMEM_ENABLE=y +CONFIG_ARCH_SPARSEMEM_DEFAULT=y +CONFIG_ARCH_SELECT_MEMORY_MODEL=y +CONFIG_HAVE_ARCH_PFN_VALID=y +CONFIG_HW_PERF_EVENTS=y +CONFIG_SYS_SUPPORTS_HUGETLBFS=y +CONFIG_ARCH_WANT_GENERAL_HUGETLB=y +CONFIG_ARCH_WANT_HUGE_PMD_SHARE=y +CONFIG_ARCH_HAS_CACHE_LINE_SIZE=y +CONFIG_SELECT_MEMORY_MODEL=y +CONFIG_SPARSEMEM_MANUAL=y +CONFIG_SPARSEMEM=y +CONFIG_HAVE_MEMORY_PRESENT=y +CONFIG_SPARSEMEM_EXTREME=y +CONFIG_SPARSEMEM_VMEMMAP_ENABLE=y +CONFIG_SPARSEMEM_VMEMMAP=y +CONFIG_HAVE_MEMBLOCK=y +CONFIG_NO_BOOTMEM=y +CONFIG_MEMORY_ISOLATION=y +# CONFIG_HAVE_BOOTMEM_INFO_NODE is not set +CONFIG_SPLIT_PTLOCK_CPUS=4 +CONFIG_MEMORY_BALLOON=y +CONFIG_BALLOON_COMPACTION=y +CONFIG_COMPACTION=y +CONFIG_MIGRATION=y +CONFIG_PHYS_ADDR_T_64BIT=y +CONFIG_ZONE_DMA_FLAG=1 +CONFIG_BOUNCE=y +CONFIG_MMU_NOTIFIER=y +CONFIG_KSM=y +CONFIG_DEFAULT_MMAP_MIN_ADDR=4096 +CONFIG_TRANSPARENT_HUGEPAGE=y +CONFIG_TRANSPARENT_HUGEPAGE_ALWAYS=y +# CONFIG_TRANSPARENT_HUGEPAGE_MADVISE is not set +# CONFIG_CLEANCACHE is not set +# 
CONFIG_FRONTSWAP is not set +CONFIG_CMA=y +# CONFIG_CMA_DEBUG is not set +# CONFIG_CMA_DEBUGFS is not set +CONFIG_CMA_AREAS=7 +# CONFIG_ZPOOL is not set +# CONFIG_ZBUD is not set +# CONFIG_ZSMALLOC is not set +CONFIG_GENERIC_EARLY_IOREMAP=y +# CONFIG_IDLE_PAGE_TRACKING is not set +# CONFIG_SECCOMP is not set +# CONFIG_XEN is not set +CONFIG_FORCE_MAX_ZONEORDER=11 +# CONFIG_ARMV8_DEPRECATED is not set + +# +# ARMv8.1 architectural features +# +CONFIG_ARM64_HW_AFDBM=y +CONFIG_ARM64_PAN=y +# CONFIG_ARM64_LSE_ATOMICS is not set + +# +# Boot options +# +CONFIG_CMDLINE="console=ttyAMA0" +# CONFIG_CMDLINE_FORCE is not set +CONFIG_EFI_STUB=y +CONFIG_EFI=y +CONFIG_DMI=y + +# +# Userspace binary formats +# +CONFIG_BINFMT_ELF=y +CONFIG_COMPAT_BINFMT_ELF=y +# CONFIG_CORE_DUMP_DEFAULT_ELF_HEADERS is not set +CONFIG_BINFMT_SCRIPT=y +# CONFIG_HAVE_AOUT is not set +# CONFIG_BINFMT_MISC is not set +CONFIG_COREDUMP=y +CONFIG_COMPAT=y +CONFIG_SYSVIPC_COMPAT=y + +# +# Power management options +# +CONFIG_SUSPEND=y +CONFIG_SUSPEND_FREEZER=y +CONFIG_PM_SLEEP=y +CONFIG_PM_SLEEP_SMP=y +# CONFIG_PM_AUTOSLEEP is not set +# CONFIG_PM_WAKELOCKS is not set +CONFIG_PM=y +# CONFIG_PM_DEBUG is not set +CONFIG_PM_CLK=y +CONFIG_PM_GENERIC_DOMAINS=y +# CONFIG_WQ_POWER_EFFICIENT_DEFAULT is not set +CONFIG_PM_GENERIC_DOMAINS_SLEEP=y +CONFIG_PM_GENERIC_DOMAINS_OF=y +CONFIG_CPU_PM=y +CONFIG_ARCH_SUSPEND_POSSIBLE=y + +# +# CPU Power Management +# + +# +# CPU Idle +# +CONFIG_CPU_IDLE=y +CONFIG_CPU_IDLE_GOV_LADDER=y +CONFIG_CPU_IDLE_GOV_MENU=y +CONFIG_DT_IDLE_STATES=y + +# +# ARM CPU Idle Drivers +# +CONFIG_ARM_CPUIDLE=y +# CONFIG_ARCH_NEEDS_CPU_IDLE_COUPLED is not set + +# +# CPU Frequency scaling +# +# CONFIG_CPU_FREQ is not set +CONFIG_NET=y + +# +# Networking options +# +CONFIG_PACKET=y +# CONFIG_PACKET_DIAG is not set +CONFIG_UNIX=y +# CONFIG_UNIX_DIAG is not set +CONFIG_XFRM=y +# CONFIG_XFRM_USER is not set +# CONFIG_XFRM_SUB_POLICY is not set +# CONFIG_XFRM_MIGRATE is not set +# CONFIG_XFRM_STATISTICS is not set +# CONFIG_NET_KEY is not set +CONFIG_INET=y +# CONFIG_IP_MULTICAST is not set +# CONFIG_IP_ADVANCED_ROUTER is not set +CONFIG_IP_PNP=y +CONFIG_IP_PNP_DHCP=y +CONFIG_IP_PNP_BOOTP=y +# CONFIG_IP_PNP_RARP is not set +# CONFIG_NET_IPIP is not set +# CONFIG_NET_IPGRE_DEMUX is not set +CONFIG_NET_IP_TUNNEL=m +# CONFIG_SYN_COOKIES is not set +# CONFIG_NET_IPVTI is not set +# CONFIG_NET_UDP_TUNNEL is not set +# CONFIG_NET_FOU is not set +# CONFIG_NET_FOU_IP_TUNNELS is not set +# CONFIG_INET_AH is not set +# CONFIG_INET_ESP is not set +# CONFIG_INET_IPCOMP is not set +# CONFIG_INET_XFRM_TUNNEL is not set +CONFIG_INET_TUNNEL=m +CONFIG_INET_XFRM_MODE_TRANSPORT=y +CONFIG_INET_XFRM_MODE_TUNNEL=y +CONFIG_INET_XFRM_MODE_BEET=y +# CONFIG_INET_LRO is not set +CONFIG_INET_DIAG=y +CONFIG_INET_TCP_DIAG=y +# CONFIG_INET_UDP_DIAG is not set +# CONFIG_TCP_CONG_ADVANCED is not set +CONFIG_TCP_CONG_CUBIC=y +CONFIG_DEFAULT_TCP_CONG="cubic" +# CONFIG_TCP_MD5SIG is not set +CONFIG_IPV6=y +# CONFIG_IPV6_ROUTER_PREF is not set +# CONFIG_IPV6_OPTIMISTIC_DAD is not set +# CONFIG_INET6_AH is not set +# CONFIG_INET6_ESP is not set +# CONFIG_INET6_IPCOMP is not set +# CONFIG_IPV6_MIP6 is not set +# CONFIG_IPV6_ILA is not set +# CONFIG_INET6_XFRM_TUNNEL is not set +# CONFIG_INET6_TUNNEL is not set +CONFIG_INET6_XFRM_MODE_TRANSPORT=m +CONFIG_INET6_XFRM_MODE_TUNNEL=m +CONFIG_INET6_XFRM_MODE_BEET=m +# CONFIG_INET6_XFRM_MODE_ROUTEOPTIMIZATION is not set +# CONFIG_IPV6_VTI is not set +CONFIG_IPV6_SIT=m +# CONFIG_IPV6_SIT_6RD is not set 
+CONFIG_IPV6_NDISC_NODETYPE=y +# CONFIG_IPV6_TUNNEL is not set +# CONFIG_IPV6_GRE is not set +# CONFIG_IPV6_MULTIPLE_TABLES is not set +# CONFIG_IPV6_MROUTE is not set +# CONFIG_NETLABEL is not set +# CONFIG_NETWORK_SECMARK is not set +# CONFIG_NET_PTP_CLASSIFY is not set +# CONFIG_NETWORK_PHY_TIMESTAMPING is not set +# CONFIG_NETFILTER is not set +# CONFIG_IP_DCCP is not set +# CONFIG_IP_SCTP is not set +# CONFIG_RDS is not set +# CONFIG_TIPC is not set +# CONFIG_ATM is not set +# CONFIG_L2TP is not set +# CONFIG_BRIDGE is not set +CONFIG_HAVE_NET_DSA=y +# CONFIG_VLAN_8021Q is not set +# CONFIG_DECNET is not set +# CONFIG_LLC2 is not set +# CONFIG_IPX is not set +# CONFIG_ATALK is not set +# CONFIG_X25 is not set +# CONFIG_LAPB is not set +# CONFIG_PHONET is not set +# CONFIG_6LOWPAN is not set +# CONFIG_IEEE802154 is not set +# CONFIG_NET_SCHED is not set +# CONFIG_DCB is not set +CONFIG_DNS_RESOLVER=y +# CONFIG_BATMAN_ADV is not set +# CONFIG_OPENVSWITCH is not set +# CONFIG_VSOCKETS is not set +# CONFIG_NETLINK_MMAP is not set +# CONFIG_NETLINK_DIAG is not set +# CONFIG_MPLS is not set +# CONFIG_HSR is not set +# CONFIG_NET_SWITCHDEV is not set +# CONFIG_NET_L3_MASTER_DEV is not set +CONFIG_RPS=y +CONFIG_RFS_ACCEL=y +CONFIG_XPS=y +# CONFIG_CGROUP_NET_PRIO is not set +# CONFIG_CGROUP_NET_CLASSID is not set +CONFIG_NET_RX_BUSY_POLL=y +CONFIG_BQL=y +CONFIG_BPF_JIT=y +CONFIG_NET_FLOW_LIMIT=y + +# +# Network testing +# +# CONFIG_NET_PKTGEN is not set +# CONFIG_HAMRADIO is not set +# CONFIG_CAN is not set +# CONFIG_IRDA is not set +# CONFIG_BT is not set +# CONFIG_AF_RXRPC is not set +# CONFIG_WIRELESS is not set +# CONFIG_WIMAX is not set +# CONFIG_RFKILL is not set +# CONFIG_RFKILL_REGULATOR is not set +CONFIG_NET_9P=y +CONFIG_NET_9P_VIRTIO=y +# CONFIG_NET_9P_DEBUG is not set +# CONFIG_CAIF is not set +# CONFIG_CEPH_LIB is not set +# CONFIG_NFC is not set +# CONFIG_LWTUNNEL is not set +CONFIG_HAVE_BPF_JIT=y + +# +# Device Drivers +# +CONFIG_ARM_AMBA=y +# CONFIG_TEGRA_AHB is not set + +# +# Generic Driver Options +# +CONFIG_UEVENT_HELPER=y +CONFIG_UEVENT_HELPER_PATH="/sbin/hotplug" +CONFIG_DEVTMPFS=y +CONFIG_DEVTMPFS_MOUNT=y +CONFIG_STANDALONE=y +CONFIG_PREVENT_FIRMWARE_BUILD=y +CONFIG_FW_LOADER=y +CONFIG_FIRMWARE_IN_KERNEL=y +CONFIG_EXTRA_FIRMWARE="" +# CONFIG_FW_LOADER_USER_HELPER_FALLBACK is not set +CONFIG_ALLOW_DEV_COREDUMP=y +# CONFIG_DEBUG_DRIVER is not set +# CONFIG_DEBUG_DEVRES is not set +# CONFIG_SYS_HYPERVISOR is not set +# CONFIG_GENERIC_CPU_DEVICES is not set +CONFIG_GENERIC_CPU_AUTOPROBE=y +CONFIG_REGMAP=y +CONFIG_REGMAP_MMIO=y +# CONFIG_DMA_SHARED_BUFFER is not set +CONFIG_DMA_CMA=y + +# +# Default contiguous memory area size: +# +CONFIG_CMA_SIZE_MBYTES=16 +CONFIG_CMA_SIZE_SEL_MBYTES=y +# CONFIG_CMA_SIZE_SEL_PERCENTAGE is not set +# CONFIG_CMA_SIZE_SEL_MIN is not set +# CONFIG_CMA_SIZE_SEL_MAX is not set +CONFIG_CMA_ALIGNMENT=8 + +# +# Bus devices +# +# CONFIG_ARM_CCI400_PMU is not set +# CONFIG_ARM_CCI500_PMU is not set +# CONFIG_ARM_CCN is not set +CONFIG_VEXPRESS_CONFIG=y +# CONFIG_CONNECTOR is not set +# CONFIG_MTD is not set +CONFIG_DTC=y +CONFIG_OF=y +# CONFIG_OF_UNITTEST is not set +CONFIG_OF_FLATTREE=y +CONFIG_OF_EARLY_FLATTREE=y +CONFIG_OF_ADDRESS=y +CONFIG_OF_ADDRESS_PCI=y +CONFIG_OF_IRQ=y +CONFIG_OF_NET=y +CONFIG_OF_MDIO=y +CONFIG_OF_PCI=y +CONFIG_OF_PCI_IRQ=y +CONFIG_OF_RESERVED_MEM=y +# CONFIG_OF_OVERLAY is not set +# CONFIG_PARPORT is not set +CONFIG_BLK_DEV=y +# CONFIG_BLK_DEV_NULL_BLK is not set +# CONFIG_BLK_DEV_PCIESSD_MTIP32XX is not set +# 
CONFIG_BLK_CPQ_CISS_DA is not set +# CONFIG_BLK_DEV_DAC960 is not set +# CONFIG_BLK_DEV_UMEM is not set +# CONFIG_BLK_DEV_COW_COMMON is not set +CONFIG_BLK_DEV_LOOP=y +CONFIG_BLK_DEV_LOOP_MIN_COUNT=8 +# CONFIG_BLK_DEV_CRYPTOLOOP is not set +# CONFIG_BLK_DEV_DRBD is not set +# CONFIG_BLK_DEV_NBD is not set +# CONFIG_BLK_DEV_SKD is not set +# CONFIG_BLK_DEV_SX8 is not set +# CONFIG_BLK_DEV_RAM is not set +# CONFIG_CDROM_PKTCDVD is not set +# CONFIG_ATA_OVER_ETH is not set +CONFIG_VIRTIO_BLK=y +# CONFIG_BLK_DEV_RBD is not set +# CONFIG_BLK_DEV_RSXX is not set +# CONFIG_BLK_DEV_NVME is not set + +# +# Misc devices +# +# CONFIG_SENSORS_LIS3LV02D is not set +# CONFIG_AD525X_DPOT is not set +# CONFIG_DUMMY_IRQ is not set +# CONFIG_PHANTOM is not set +# CONFIG_SGI_IOC4 is not set +# CONFIG_TIFM_CORE is not set +# CONFIG_ICS932S401 is not set +# CONFIG_ENCLOSURE_SERVICES is not set +# CONFIG_HP_ILO is not set +# CONFIG_APDS9802ALS is not set +# CONFIG_ISL29003 is not set +# CONFIG_ISL29020 is not set +# CONFIG_SENSORS_TSL2550 is not set +# CONFIG_SENSORS_BH1780 is not set +# CONFIG_SENSORS_BH1770 is not set +# CONFIG_SENSORS_APDS990X is not set +# CONFIG_HMC6352 is not set +# CONFIG_DS1682 is not set +# CONFIG_TI_DAC7512 is not set +# CONFIG_BMP085_I2C is not set +# CONFIG_BMP085_SPI is not set +# CONFIG_USB_SWITCH_FSA9480 is not set +# CONFIG_LATTICE_ECP3_CONFIG is not set +# CONFIG_SRAM is not set +CONFIG_VEXPRESS_SYSCFG=y +# CONFIG_C2PORT is not set + +# +# EEPROM support +# +# CONFIG_EEPROM_AT24 is not set +# CONFIG_EEPROM_AT25 is not set +# CONFIG_EEPROM_LEGACY is not set +# CONFIG_EEPROM_MAX6875 is not set +# CONFIG_EEPROM_93CX6 is not set +# CONFIG_EEPROM_93XX46 is not set +# CONFIG_CB710_CORE is not set + +# +# Texas Instruments shared transport line discipline +# +# CONFIG_TI_ST is not set +# CONFIG_SENSORS_LIS3_SPI is not set +# CONFIG_SENSORS_LIS3_I2C is not set + +# +# Altera FPGA firmware download module +# +# CONFIG_ALTERA_STAPL is not set + +# +# Intel MIC Bus Driver +# + +# +# SCIF Bus Driver +# + +# +# Intel MIC Host Driver +# + +# +# Intel MIC Card Driver +# + +# +# SCIF Driver +# + +# +# Intel MIC Coprocessor State Management (COSM) Drivers +# +# CONFIG_GENWQE is not set +# CONFIG_ECHO is not set +# CONFIG_CXL_BASE is not set +# CONFIG_CXL_KERNEL_API is not set +# CONFIG_CXL_EEH is not set + +# +# SCSI device support +# +CONFIG_SCSI_MOD=y +# CONFIG_RAID_ATTRS is not set +CONFIG_SCSI=y +CONFIG_SCSI_DMA=y +# CONFIG_SCSI_NETLINK is not set +# CONFIG_SCSI_MQ_DEFAULT is not set +# CONFIG_SCSI_PROC_FS is not set + +# +# SCSI support type (disk, tape, CD-ROM) +# +CONFIG_BLK_DEV_SD=y +# CONFIG_CHR_DEV_ST is not set +# CONFIG_CHR_DEV_OSST is not set +# CONFIG_BLK_DEV_SR is not set +# CONFIG_CHR_DEV_SG is not set +# CONFIG_CHR_DEV_SCH is not set +# CONFIG_SCSI_CONSTANTS is not set +# CONFIG_SCSI_LOGGING is not set +# CONFIG_SCSI_SCAN_ASYNC is not set + +# +# SCSI Transports +# +# CONFIG_SCSI_SPI_ATTRS is not set +# CONFIG_SCSI_FC_ATTRS is not set +# CONFIG_SCSI_ISCSI_ATTRS is not set +# CONFIG_SCSI_SAS_ATTRS is not set +# CONFIG_SCSI_SAS_LIBSAS is not set +# CONFIG_SCSI_SRP_ATTRS is not set +# CONFIG_SCSI_LOWLEVEL is not set +# CONFIG_SCSI_LOWLEVEL_PCMCIA is not set +# CONFIG_SCSI_DH is not set +# CONFIG_SCSI_OSD_INITIATOR is not set +CONFIG_HAVE_PATA_PLATFORM=y +CONFIG_ATA=y +# CONFIG_ATA_NONSTANDARD is not set +CONFIG_ATA_VERBOSE_ERROR=y +CONFIG_SATA_PMP=y + +# +# Controllers with non-SFF native interface +# +CONFIG_SATA_AHCI=y +CONFIG_SATA_AHCI_PLATFORM=y +CONFIG_AHCI_CEVA=y +# 
CONFIG_AHCI_TEGRA is not set +CONFIG_AHCI_XGENE=y +# CONFIG_AHCI_QORIQ is not set +# CONFIG_SATA_INIC162X is not set +# CONFIG_SATA_ACARD_AHCI is not set +# CONFIG_SATA_SIL24 is not set +CONFIG_ATA_SFF=y + +# +# SFF controllers with custom DMA interface +# +# CONFIG_PDC_ADMA is not set +# CONFIG_SATA_QSTOR is not set +# CONFIG_SATA_SX4 is not set +CONFIG_ATA_BMDMA=y + +# +# SATA SFF controllers with BMDMA +# +# CONFIG_ATA_PIIX is not set +# CONFIG_SATA_MV is not set +# CONFIG_SATA_NV is not set +# CONFIG_SATA_PROMISE is not set +# CONFIG_SATA_SIL is not set +# CONFIG_SATA_SIS is not set +# CONFIG_SATA_SVW is not set +# CONFIG_SATA_ULI is not set +# CONFIG_SATA_VIA is not set +# CONFIG_SATA_VITESSE is not set + +# +# PATA SFF controllers with BMDMA +# +# CONFIG_PATA_ALI is not set +# CONFIG_PATA_AMD is not set +# CONFIG_PATA_ARTOP is not set +# CONFIG_PATA_ATIIXP is not set +# CONFIG_PATA_ATP867X is not set +# CONFIG_PATA_CMD64X is not set +# CONFIG_PATA_CYPRESS is not set +# CONFIG_PATA_EFAR is not set +# CONFIG_PATA_HPT366 is not set +# CONFIG_PATA_HPT37X is not set +# CONFIG_PATA_HPT3X2N is not set +# CONFIG_PATA_HPT3X3 is not set +# CONFIG_PATA_IT8213 is not set +# CONFIG_PATA_IT821X is not set +# CONFIG_PATA_JMICRON is not set +# CONFIG_PATA_MARVELL is not set +# CONFIG_PATA_NETCELL is not set +# CONFIG_PATA_NINJA32 is not set +# CONFIG_PATA_NS87415 is not set +# CONFIG_PATA_OLDPIIX is not set +# CONFIG_PATA_OPTIDMA is not set +# CONFIG_PATA_PDC2027X is not set +# CONFIG_PATA_PDC_OLD is not set +# CONFIG_PATA_RADISYS is not set +# CONFIG_PATA_RDC is not set +# CONFIG_PATA_SCH is not set +# CONFIG_PATA_SERVERWORKS is not set +# CONFIG_PATA_SIL680 is not set +# CONFIG_PATA_SIS is not set +# CONFIG_PATA_TOSHIBA is not set +# CONFIG_PATA_TRIFLEX is not set +# CONFIG_PATA_VIA is not set +# CONFIG_PATA_WINBOND is not set + +# +# PIO-only SFF controllers +# +# CONFIG_PATA_CMD640_PCI is not set +# CONFIG_PATA_MPIIX is not set +# CONFIG_PATA_NS87410 is not set +# CONFIG_PATA_OPTI is not set +CONFIG_PATA_PLATFORM=y +CONFIG_PATA_OF_PLATFORM=y +# CONFIG_PATA_RZ1000 is not set + +# +# Generic fallback / legacy drivers +# +# CONFIG_ATA_GENERIC is not set +# CONFIG_PATA_LEGACY is not set +# CONFIG_MD is not set +# CONFIG_TARGET_CORE is not set +# CONFIG_FUSION is not set + +# +# IEEE 1394 (FireWire) support +# +# CONFIG_FIREWIRE is not set +# CONFIG_FIREWIRE_NOSY is not set +CONFIG_NETDEVICES=y +CONFIG_MII=y +CONFIG_NET_CORE=y +# CONFIG_BONDING is not set +# CONFIG_DUMMY is not set +# CONFIG_EQUALIZER is not set +# CONFIG_NET_FC is not set +# CONFIG_NET_TEAM is not set +# CONFIG_MACVLAN is not set +# CONFIG_IPVLAN is not set +# CONFIG_VXLAN is not set +# CONFIG_NETCONSOLE is not set +# CONFIG_NETPOLL is not set +# CONFIG_NET_POLL_CONTROLLER is not set +CONFIG_TUN=y +# CONFIG_TUN_VNET_CROSS_LE is not set +# CONFIG_VETH is not set +CONFIG_VIRTIO_NET=y +# CONFIG_NLMON is not set +# CONFIG_ARCNET is not set + +# +# CAIF transport drivers +# +# CONFIG_VHOST_NET is not set +# CONFIG_VHOST_CROSS_ENDIAN_LEGACY is not set + +# +# Distributed Switch Architecture drivers +# +# CONFIG_NET_DSA_MV88E6XXX is not set +# CONFIG_NET_DSA_MV88E6XXX_NEED_PPU is not set +CONFIG_ETHERNET=y +CONFIG_NET_VENDOR_3COM=y +# CONFIG_VORTEX is not set +# CONFIG_TYPHOON is not set +CONFIG_NET_VENDOR_ADAPTEC=y +# CONFIG_ADAPTEC_STARFIRE is not set +CONFIG_NET_VENDOR_AGERE=y +# CONFIG_ET131X is not set +CONFIG_NET_VENDOR_ALTEON=y +# CONFIG_ACENIC is not set +# CONFIG_ALTERA_TSE is not set +CONFIG_NET_VENDOR_AMD=y +# CONFIG_AMD8111_ETH 
is not set +# CONFIG_PCNET32 is not set +# CONFIG_AMD_XGBE is not set +CONFIG_NET_XGENE=y +CONFIG_NET_VENDOR_ARC=y +# CONFIG_ARC_EMAC is not set +# CONFIG_EMAC_ROCKCHIP is not set +CONFIG_NET_VENDOR_ATHEROS=y +# CONFIG_ATL2 is not set +# CONFIG_ATL1 is not set +# CONFIG_ATL1E is not set +# CONFIG_ATL1C is not set +# CONFIG_ALX is not set +# CONFIG_NET_VENDOR_AURORA is not set +CONFIG_NET_CADENCE=y +# CONFIG_MACB is not set +CONFIG_NET_VENDOR_BROADCOM=y +# CONFIG_B44 is not set +# CONFIG_BCMGENET is not set +# CONFIG_BNX2 is not set +# CONFIG_CNIC is not set +# CONFIG_TIGON3 is not set +# CONFIG_BNX2X is not set +# CONFIG_SYSTEMPORT is not set +# CONFIG_BNXT is not set +CONFIG_NET_VENDOR_BROCADE=y +# CONFIG_BNA is not set +CONFIG_NET_VENDOR_CAVIUM=y +# CONFIG_THUNDER_NIC_PF is not set +# CONFIG_THUNDER_NIC_VF is not set +# CONFIG_THUNDER_NIC_BGX is not set +# CONFIG_LIQUIDIO is not set +CONFIG_NET_VENDOR_CHELSIO=y +# CONFIG_CHELSIO_T1 is not set +# CONFIG_CHELSIO_T3 is not set +# CONFIG_CHELSIO_T4 is not set +# CONFIG_CHELSIO_T4VF is not set +CONFIG_NET_VENDOR_CISCO=y +# CONFIG_ENIC is not set +# CONFIG_DNET is not set +CONFIG_NET_VENDOR_DEC=y +# CONFIG_NET_TULIP is not set +CONFIG_NET_VENDOR_DLINK=y +# CONFIG_DL2K is not set +# CONFIG_SUNDANCE is not set +CONFIG_NET_VENDOR_EMULEX=y +# CONFIG_BE2NET is not set +CONFIG_NET_VENDOR_EZCHIP=y +# CONFIG_EZCHIP_NPS_MANAGEMENT_ENET is not set +CONFIG_NET_VENDOR_EXAR=y +# CONFIG_S2IO is not set +# CONFIG_VXGE is not set +CONFIG_NET_VENDOR_FREESCALE=y +# CONFIG_FSL_PQ_MDIO is not set +# CONFIG_FSL_XGMAC_MDIO is not set +# CONFIG_GIANFAR is not set +CONFIG_NET_VENDOR_HISILICON=y +# CONFIG_HIX5HD2_GMAC is not set +# CONFIG_HIP04_ETH is not set +# CONFIG_HNS is not set +# CONFIG_HNS_DSAF is not set +# CONFIG_HNS_ENET is not set +CONFIG_NET_VENDOR_HP=y +# CONFIG_HP100 is not set +CONFIG_NET_VENDOR_INTEL=y +# CONFIG_E100 is not set +# CONFIG_E1000 is not set +# CONFIG_E1000E is not set +# CONFIG_IGB is not set +# CONFIG_IGBVF is not set +# CONFIG_IXGB is not set +# CONFIG_IXGBE is not set +# CONFIG_IXGBEVF is not set +# CONFIG_I40E is not set +# CONFIG_I40EVF is not set +# CONFIG_FM10K is not set +CONFIG_NET_VENDOR_I825XX=y +# CONFIG_JME is not set +CONFIG_NET_VENDOR_MARVELL=y +# CONFIG_MVMDIO is not set +# CONFIG_PXA168_ETH is not set +# CONFIG_SKGE is not set +CONFIG_SKY2=y +# CONFIG_SKY2_DEBUG is not set +CONFIG_NET_VENDOR_MELLANOX=y +# CONFIG_MLX4_EN is not set +# CONFIG_MLX4_CORE is not set +# CONFIG_MLX5_CORE is not set +# CONFIG_MLXSW_CORE is not set +CONFIG_NET_VENDOR_MICREL=y +# CONFIG_KS8842 is not set +# CONFIG_KS8851 is not set +# CONFIG_KS8851_MLL is not set +# CONFIG_KSZ884X_PCI is not set +CONFIG_NET_VENDOR_MICROCHIP=y +# CONFIG_ENC28J60 is not set +# CONFIG_ENCX24J600 is not set +CONFIG_NET_VENDOR_MYRI=y +# CONFIG_MYRI10GE is not set +# CONFIG_FEALNX is not set +CONFIG_NET_VENDOR_NATSEMI=y +# CONFIG_NATSEMI is not set +# CONFIG_NS83820 is not set +CONFIG_NET_VENDOR_8390=y +# CONFIG_NE2K_PCI is not set +CONFIG_NET_VENDOR_NVIDIA=y +# CONFIG_FORCEDETH is not set +CONFIG_NET_VENDOR_OKI=y +# CONFIG_ETHOC is not set +CONFIG_NET_PACKET_ENGINE=y +# CONFIG_HAMACHI is not set +# CONFIG_YELLOWFIN is not set +CONFIG_NET_VENDOR_QLOGIC=y +# CONFIG_QLA3XXX is not set +# CONFIG_QLCNIC is not set +# CONFIG_QLGE is not set +# CONFIG_NETXEN_NIC is not set +# CONFIG_QED is not set +CONFIG_NET_VENDOR_QUALCOMM=y +# CONFIG_QCA7000 is not set +CONFIG_NET_VENDOR_REALTEK=y +# CONFIG_8139CP is not set +# CONFIG_8139TOO is not set +# CONFIG_R8169 is not set 
+CONFIG_NET_VENDOR_RENESAS=y +CONFIG_NET_VENDOR_RDC=y +# CONFIG_R6040 is not set +CONFIG_NET_VENDOR_ROCKER=y +CONFIG_NET_VENDOR_SAMSUNG=y +# CONFIG_SXGBE_ETH is not set +CONFIG_NET_VENDOR_SEEQ=y +CONFIG_NET_VENDOR_SILAN=y +# CONFIG_SC92031 is not set +CONFIG_NET_VENDOR_SIS=y +# CONFIG_SIS900 is not set +# CONFIG_SIS190 is not set +# CONFIG_SFC is not set +CONFIG_NET_VENDOR_SMSC=y +CONFIG_SMC91X=y +# CONFIG_EPIC100 is not set +CONFIG_SMSC911X=y +# CONFIG_SMSC911X_ARCH_HOOKS is not set +# CONFIG_SMSC9420 is not set +CONFIG_NET_VENDOR_STMICRO=y +# CONFIG_STMMAC_ETH is not set +CONFIG_NET_VENDOR_SUN=y +# CONFIG_HAPPYMEAL is not set +# CONFIG_SUNGEM is not set +# CONFIG_CASSINI is not set +# CONFIG_NIU is not set +CONFIG_NET_VENDOR_SYNOPSYS=y +# CONFIG_SYNOPSYS_DWC_ETH_QOS is not set +CONFIG_NET_VENDOR_TEHUTI=y +# CONFIG_TEHUTI is not set +CONFIG_NET_VENDOR_TI=y +# CONFIG_TI_CPSW_ALE is not set +# CONFIG_TLAN is not set +CONFIG_NET_VENDOR_VIA=y +# CONFIG_VIA_RHINE is not set +# CONFIG_VIA_VELOCITY is not set +CONFIG_NET_VENDOR_WIZNET=y +# CONFIG_WIZNET_W5100 is not set +# CONFIG_WIZNET_W5300 is not set +# CONFIG_FDDI is not set +# CONFIG_HIPPI is not set +CONFIG_PHYLIB=y + +# +# MII PHY device drivers +# +# CONFIG_AQUANTIA_PHY is not set +# CONFIG_AT803X_PHY is not set +# CONFIG_AMD_PHY is not set +# CONFIG_MARVELL_PHY is not set +# CONFIG_DAVICOM_PHY is not set +# CONFIG_QSEMI_PHY is not set +# CONFIG_LXT_PHY is not set +# CONFIG_CICADA_PHY is not set +# CONFIG_VITESSE_PHY is not set +# CONFIG_TERANETICS_PHY is not set +# CONFIG_SMSC_PHY is not set +# CONFIG_BROADCOM_PHY is not set +# CONFIG_BCM7XXX_PHY is not set +# CONFIG_BCM87XX_PHY is not set +# CONFIG_ICPLUS_PHY is not set +# CONFIG_REALTEK_PHY is not set +# CONFIG_NATIONAL_PHY is not set +# CONFIG_STE10XP is not set +# CONFIG_LSI_ET1011C_PHY is not set +# CONFIG_MICREL_PHY is not set +# CONFIG_DP83848_PHY is not set +# CONFIG_DP83867_PHY is not set +# CONFIG_MICROCHIP_PHY is not set +# CONFIG_FIXED_PHY is not set +# CONFIG_MDIO_BITBANG is not set +# CONFIG_MDIO_OCTEON is not set +# CONFIG_MDIO_BUS_MUX_GPIO is not set +# CONFIG_MDIO_BUS_MUX_MMIOREG is not set +# CONFIG_MDIO_BCM_UNIMAC is not set +# CONFIG_MDIO_BCM_IPROC is not set +# CONFIG_MICREL_KS8995MA is not set +# CONFIG_PPP is not set +# CONFIG_SLIP is not set +CONFIG_USB_NET_DRIVERS=y +# CONFIG_USB_CATC is not set +# CONFIG_USB_KAWETH is not set +# CONFIG_USB_PEGASUS is not set +# CONFIG_USB_RTL8150 is not set +# CONFIG_USB_RTL8152 is not set +# CONFIG_USB_LAN78XX is not set +# CONFIG_USB_USBNET is not set +# CONFIG_USB_IPHETH is not set +# CONFIG_WLAN is not set + +# +# Enable WiMAX (Networking options) to see the WiMAX drivers +# +# CONFIG_WAN is not set +# CONFIG_VMXNET3 is not set +# CONFIG_ISDN is not set +# CONFIG_NVM is not set + +# +# Input device support +# +CONFIG_INPUT=y +CONFIG_INPUT_LEDS=y +# CONFIG_INPUT_FF_MEMLESS is not set +# CONFIG_INPUT_POLLDEV is not set +# CONFIG_INPUT_SPARSEKMAP is not set +# CONFIG_INPUT_MATRIXKMAP is not set + +# +# Userland interfaces +# +CONFIG_INPUT_MOUSEDEV=y +CONFIG_INPUT_MOUSEDEV_PSAUX=y +CONFIG_INPUT_MOUSEDEV_SCREEN_X=1024 +CONFIG_INPUT_MOUSEDEV_SCREEN_Y=768 +# CONFIG_INPUT_JOYDEV is not set +CONFIG_INPUT_EVDEV=y +# CONFIG_INPUT_EVBUG is not set + +# +# Input Device Drivers +# +CONFIG_INPUT_KEYBOARD=y +# CONFIG_KEYBOARD_ADP5588 is not set +# CONFIG_KEYBOARD_ADP5589 is not set +CONFIG_KEYBOARD_ATKBD=y +# CONFIG_KEYBOARD_QT1070 is not set +# CONFIG_KEYBOARD_QT2160 is not set +# CONFIG_KEYBOARD_LKKBD is not set +CONFIG_KEYBOARD_GPIO=y 
+# CONFIG_KEYBOARD_GPIO_POLLED is not set +# CONFIG_KEYBOARD_TCA6416 is not set +# CONFIG_KEYBOARD_TCA8418 is not set +# CONFIG_KEYBOARD_MATRIX is not set +# CONFIG_KEYBOARD_LM8323 is not set +# CONFIG_KEYBOARD_LM8333 is not set +# CONFIG_KEYBOARD_MAX7359 is not set +# CONFIG_KEYBOARD_MCS is not set +# CONFIG_KEYBOARD_MPR121 is not set +# CONFIG_KEYBOARD_NEWTON is not set +# CONFIG_KEYBOARD_TEGRA is not set +# CONFIG_KEYBOARD_OPENCORES is not set +# CONFIG_KEYBOARD_SAMSUNG is not set +# CONFIG_KEYBOARD_STOWAWAY is not set +# CONFIG_KEYBOARD_SUNKBD is not set +# CONFIG_KEYBOARD_OMAP4 is not set +# CONFIG_KEYBOARD_XTKBD is not set +# CONFIG_KEYBOARD_CAP11XX is not set +# CONFIG_KEYBOARD_BCM is not set +CONFIG_INPUT_MOUSE=y +CONFIG_MOUSE_PS2=y +CONFIG_MOUSE_PS2_ALPS=y +CONFIG_MOUSE_PS2_LOGIPS2PP=y +CONFIG_MOUSE_PS2_SYNAPTICS=y +CONFIG_MOUSE_PS2_CYPRESS=y +CONFIG_MOUSE_PS2_TRACKPOINT=y +# CONFIG_MOUSE_PS2_ELANTECH is not set +# CONFIG_MOUSE_PS2_SENTELIC is not set +# CONFIG_MOUSE_PS2_TOUCHKIT is not set +CONFIG_MOUSE_PS2_FOCALTECH=y +# CONFIG_MOUSE_SERIAL is not set +# CONFIG_MOUSE_APPLETOUCH is not set +# CONFIG_MOUSE_BCM5974 is not set +# CONFIG_MOUSE_CYAPA is not set +# CONFIG_MOUSE_ELAN_I2C is not set +# CONFIG_MOUSE_VSXXXAA is not set +# CONFIG_MOUSE_GPIO is not set +# CONFIG_MOUSE_SYNAPTICS_I2C is not set +# CONFIG_MOUSE_SYNAPTICS_USB is not set +# CONFIG_INPUT_JOYSTICK is not set +# CONFIG_INPUT_TABLET is not set +# CONFIG_INPUT_TOUCHSCREEN is not set +# CONFIG_INPUT_MISC is not set + +# +# Hardware I/O ports +# +CONFIG_SERIO=y +# CONFIG_SERIO_SERPORT is not set +CONFIG_SERIO_AMBAKMI=y +# CONFIG_SERIO_PCIPS2 is not set +CONFIG_SERIO_LIBPS2=y +# CONFIG_SERIO_RAW is not set +# CONFIG_SERIO_ALTERA_PS2 is not set +# CONFIG_SERIO_PS2MULT is not set +# CONFIG_SERIO_ARC_PS2 is not set +# CONFIG_SERIO_APBPS2 is not set +# CONFIG_USERIO is not set +# CONFIG_GAMEPORT is not set + +# +# Character devices +# +CONFIG_TTY=y +CONFIG_VT=y +CONFIG_CONSOLE_TRANSLATIONS=y +CONFIG_VT_CONSOLE=y +CONFIG_VT_CONSOLE_SLEEP=y +CONFIG_HW_CONSOLE=y +CONFIG_VT_HW_CONSOLE_BINDING=y +CONFIG_UNIX98_PTYS=y +# CONFIG_DEVPTS_MULTIPLE_INSTANCES is not set +CONFIG_LEGACY_PTYS=y +CONFIG_LEGACY_PTY_COUNT=16 +# CONFIG_SERIAL_NONSTANDARD is not set +# CONFIG_NOZOMI is not set +# CONFIG_N_GSM is not set +# CONFIG_TRACE_SINK is not set +CONFIG_DEVMEM=y +CONFIG_DEVKMEM=y + +# +# Serial drivers +# +CONFIG_SERIAL_EARLYCON=y +CONFIG_SERIAL_8250=y +CONFIG_SERIAL_8250_DEPRECATED_OPTIONS=y +CONFIG_SERIAL_8250_CONSOLE=y +CONFIG_SERIAL_8250_DMA=y +CONFIG_SERIAL_8250_PCI=y +CONFIG_SERIAL_8250_NR_UARTS=4 +CONFIG_SERIAL_8250_RUNTIME_UARTS=4 +# CONFIG_SERIAL_8250_EXTENDED is not set +CONFIG_SERIAL_8250_FSL=y +CONFIG_SERIAL_8250_DW=y +# CONFIG_SERIAL_8250_RT288X is not set +CONFIG_SERIAL_8250_MT6577=y +# CONFIG_SERIAL_8250_INGENIC is not set +# CONFIG_SERIAL_8250_MID is not set + +# +# Non-8250 serial port support +# +# CONFIG_SERIAL_AMBA_PL010 is not set +CONFIG_SERIAL_AMBA_PL011=y +CONFIG_SERIAL_AMBA_PL011_CONSOLE=y +# CONFIG_SERIAL_EARLYCON_ARM_SEMIHOST is not set +CONFIG_SERIAL_SAMSUNG=y +CONFIG_SERIAL_SAMSUNG_UARTS_4=y +CONFIG_SERIAL_SAMSUNG_UARTS=4 +CONFIG_SERIAL_SAMSUNG_CONSOLE=y +# CONFIG_SERIAL_MAX3100 is not set +# CONFIG_SERIAL_MAX310X is not set +# CONFIG_SERIAL_UARTLITE is not set +CONFIG_SERIAL_CORE=y +CONFIG_SERIAL_CORE_CONSOLE=y +# CONFIG_SERIAL_JSM is not set +CONFIG_SERIAL_MSM=y +CONFIG_SERIAL_MSM_CONSOLE=y +CONFIG_SERIAL_OF_PLATFORM=y +# CONFIG_SERIAL_SCCNXP is not set +# CONFIG_SERIAL_SC16IS7XX is not set +# 
CONFIG_SERIAL_ALTERA_JTAGUART is not set +# CONFIG_SERIAL_ALTERA_UART is not set +# CONFIG_SERIAL_IFX6X60 is not set +CONFIG_SERIAL_XILINX_PS_UART=y +CONFIG_SERIAL_XILINX_PS_UART_CONSOLE=y +# CONFIG_SERIAL_ARC is not set +# CONFIG_SERIAL_RP2 is not set +# CONFIG_SERIAL_FSL_LPUART is not set +# CONFIG_SERIAL_CONEXANT_DIGICOLOR is not set +# CONFIG_SERIAL_SPRD is not set +CONFIG_HVC_DRIVER=y +# CONFIG_HVC_DCC is not set +CONFIG_VIRTIO_CONSOLE=y +# CONFIG_IPMI_HANDLER is not set +# CONFIG_HW_RANDOM is not set +# CONFIG_R3964 is not set +# CONFIG_APPLICOM is not set + +# +# PCMCIA character devices +# +# CONFIG_RAW_DRIVER is not set +# CONFIG_TCG_TPM is not set +CONFIG_DEVPORT=y +# CONFIG_XILLYBUS is not set + +# +# I2C support +# +CONFIG_I2C=y +CONFIG_I2C_BOARDINFO=y +CONFIG_I2C_COMPAT=y +# CONFIG_I2C_CHARDEV is not set +# CONFIG_I2C_MUX is not set +CONFIG_I2C_HELPER_AUTO=y + +# +# I2C Hardware Bus support +# + +# +# PC SMBus host controller drivers +# +# CONFIG_I2C_ALI1535 is not set +# CONFIG_I2C_ALI1563 is not set +# CONFIG_I2C_ALI15X3 is not set +# CONFIG_I2C_AMD756 is not set +# CONFIG_I2C_AMD8111 is not set +# CONFIG_I2C_I801 is not set +# CONFIG_I2C_ISCH is not set +# CONFIG_I2C_PIIX4 is not set +# CONFIG_I2C_NFORCE2 is not set +# CONFIG_I2C_SIS5595 is not set +# CONFIG_I2C_SIS630 is not set +# CONFIG_I2C_SIS96X is not set +# CONFIG_I2C_VIA is not set +# CONFIG_I2C_VIAPRO is not set + +# +# I2C system bus drivers (mostly embedded / system-on-chip) +# +CONFIG_I2C_BCM_IPROC=y +# CONFIG_I2C_CADENCE is not set +# CONFIG_I2C_CBUS_GPIO is not set +# CONFIG_I2C_DESIGNWARE_PLATFORM is not set +# CONFIG_I2C_DESIGNWARE_PCI is not set +# CONFIG_I2C_EMEV2 is not set +CONFIG_I2C_EXYNOS5=y +# CONFIG_I2C_GPIO is not set +# CONFIG_I2C_IMX is not set +# CONFIG_I2C_MT65XX is not set +# CONFIG_I2C_NOMADIK is not set +# CONFIG_I2C_OCORES is not set +# CONFIG_I2C_PCA_PLATFORM is not set +# CONFIG_I2C_PXA_PCI is not set +CONFIG_I2C_QUP=y +# CONFIG_I2C_RK3X is not set +# CONFIG_I2C_SIMTEC is not set +# CONFIG_I2C_TEGRA is not set +# CONFIG_I2C_VERSATILE is not set +# CONFIG_I2C_XILINX is not set + +# +# External I2C/SMBus adapter drivers +# +# CONFIG_I2C_DIOLAN_U2C is not set +# CONFIG_I2C_PARPORT_LIGHT is not set +# CONFIG_I2C_ROBOTFUZZ_OSIF is not set +# CONFIG_I2C_TAOS_EVM is not set +# CONFIG_I2C_TINY_USB is not set + +# +# Other I2C/SMBus bus drivers +# +# CONFIG_I2C_STUB is not set +# CONFIG_I2C_SLAVE is not set +# CONFIG_I2C_DEBUG_CORE is not set +# CONFIG_I2C_DEBUG_ALGO is not set +# CONFIG_I2C_DEBUG_BUS is not set +CONFIG_SPI=y +# CONFIG_SPI_DEBUG is not set +CONFIG_SPI_MASTER=y + +# +# SPI Master Controller Drivers +# +# CONFIG_SPI_ALTERA is not set +# CONFIG_SPI_BITBANG is not set +# CONFIG_SPI_CADENCE is not set +# CONFIG_SPI_GPIO is not set +# CONFIG_SPI_FSL_SPI is not set +# CONFIG_SPI_FSL_DSPI is not set +# CONFIG_SPI_MT65XX is not set +# CONFIG_SPI_OC_TINY is not set +CONFIG_SPI_PL022=y +# CONFIG_SPI_PXA2XX is not set +# CONFIG_SPI_PXA2XX_PCI is not set +# CONFIG_SPI_ROCKCHIP is not set +CONFIG_SPI_QUP=y +# CONFIG_SPI_S3C64XX is not set +# CONFIG_SPI_SC18IS602 is not set +# CONFIG_SPI_TEGRA20_SFLASH is not set +# CONFIG_SPI_XCOMM is not set +# CONFIG_SPI_XILINX is not set +# CONFIG_SPI_ZYNQMP_GQSPI is not set +# CONFIG_SPI_DESIGNWARE is not set + +# +# SPI Protocol Masters +# +# CONFIG_SPI_SPIDEV is not set +# CONFIG_SPI_TLE62X0 is not set +# CONFIG_SPMI is not set +# CONFIG_HSI is not set + +# +# PPS support +# +# CONFIG_PPS is not set + +# +# PPS generators support +# + +# +# PTP clock 
support +# +# CONFIG_PTP_1588_CLOCK is not set + +# +# Enable PHYLIB and NETWORK_PHY_TIMESTAMPING to see the additional clocks. +# +CONFIG_PINCTRL=y + +# +# Pin controllers +# +CONFIG_PINMUX=y +CONFIG_PINCONF=y +CONFIG_GENERIC_PINCONF=y +# CONFIG_DEBUG_PINCTRL is not set +# CONFIG_PINCTRL_AMD is not set +CONFIG_PINCTRL_ROCKCHIP=y +# CONFIG_PINCTRL_SINGLE is not set +CONFIG_PINCTRL_TEGRA=y +CONFIG_PINCTRL_TEGRA124=y +CONFIG_PINCTRL_TEGRA_XUSB=y +# CONFIG_PINCTRL_BERLIN_BG2 is not set +# CONFIG_PINCTRL_BERLIN_BG2CD is not set +# CONFIG_PINCTRL_BERLIN_BG2Q is not set +# CONFIG_PINCTRL_BERLIN_BG4CT is not set +CONFIG_PINCTRL_MSM=y +# CONFIG_PINCTRL_APQ8064 is not set +# CONFIG_PINCTRL_APQ8084 is not set +# CONFIG_PINCTRL_IPQ8064 is not set +# CONFIG_PINCTRL_MSM8660 is not set +# CONFIG_PINCTRL_MSM8960 is not set +# CONFIG_PINCTRL_MSM8X74 is not set +CONFIG_PINCTRL_MSM8916=y +# CONFIG_PINCTRL_QCOM_SSBI_PMIC is not set +CONFIG_PINCTRL_SAMSUNG=y +CONFIG_PINCTRL_EXYNOS=y +CONFIG_PINCTRL_MTK_COMMON=y +# CONFIG_PINCTRL_MT8135 is not set +# CONFIG_PINCTRL_MT8127 is not set +CONFIG_PINCTRL_MT8173=y +# CONFIG_PINCTRL_MT6397 is not set +CONFIG_ARCH_WANT_OPTIONAL_GPIOLIB=y +CONFIG_ARCH_REQUIRE_GPIOLIB=y +CONFIG_GPIOLIB=y +CONFIG_GPIO_DEVRES=y +CONFIG_OF_GPIO=y +CONFIG_GPIOLIB_IRQCHIP=y +# CONFIG_DEBUG_GPIO is not set +# CONFIG_GPIO_SYSFS is not set +CONFIG_GPIO_GENERIC=y + +# +# Memory mapped GPIO drivers +# +# CONFIG_GPIO_74XX_MMIO is not set +# CONFIG_GPIO_ALTERA is not set +# CONFIG_GPIO_DWAPB is not set +CONFIG_GPIO_GENERIC_PLATFORM=y +# CONFIG_GPIO_GRGPIO is not set +CONFIG_GPIO_PL061=y +# CONFIG_GPIO_SYSCON is not set +# CONFIG_GPIO_VX855 is not set +CONFIG_GPIO_XGENE=y +# CONFIG_GPIO_XGENE_SB is not set +# CONFIG_GPIO_XILINX is not set +# CONFIG_GPIO_ZYNQ is not set +# CONFIG_GPIO_ZX is not set + +# +# I2C GPIO expanders +# +# CONFIG_GPIO_ADP5588 is not set +# CONFIG_GPIO_ADNP is not set +# CONFIG_GPIO_MAX7300 is not set +# CONFIG_GPIO_MAX732X is not set +# CONFIG_GPIO_PCA953X is not set +# CONFIG_GPIO_PCF857X is not set +# CONFIG_GPIO_SX150X is not set + +# +# MFD GPIO expanders +# + +# +# PCI GPIO expanders +# +# CONFIG_GPIO_AMD8111 is not set +# CONFIG_GPIO_BT8XX is not set +# CONFIG_GPIO_ML_IOH is not set +# CONFIG_GPIO_RDC321X is not set + +# +# SPI GPIO expanders +# +# CONFIG_GPIO_74X164 is not set +# CONFIG_GPIO_MAX7301 is not set +# CONFIG_GPIO_MC33880 is not set + +# +# SPI or I2C GPIO expanders +# +# CONFIG_GPIO_MCP23S08 is not set + +# +# USB GPIO expanders +# +# CONFIG_W1 is not set +CONFIG_POWER_SUPPLY=y +# CONFIG_POWER_SUPPLY_DEBUG is not set +# CONFIG_PDA_POWER is not set +# CONFIG_TEST_POWER is not set +# CONFIG_BATTERY_DS2780 is not set +# CONFIG_BATTERY_DS2781 is not set +# CONFIG_BATTERY_DS2782 is not set +# CONFIG_BATTERY_SBS is not set +# CONFIG_BATTERY_BQ27XXX is not set +# CONFIG_BATTERY_MAX17040 is not set +# CONFIG_BATTERY_MAX17042 is not set +# CONFIG_CHARGER_MAX8903 is not set +# CONFIG_CHARGER_LP8727 is not set +# CONFIG_CHARGER_GPIO is not set +# CONFIG_CHARGER_MANAGER is not set +# CONFIG_CHARGER_BQ2415X is not set +# CONFIG_CHARGER_BQ24190 is not set +# CONFIG_CHARGER_BQ24735 is not set +# CONFIG_CHARGER_BQ25890 is not set +# CONFIG_CHARGER_SMB347 is not set +# CONFIG_BATTERY_GAUGE_LTC2941 is not set +# CONFIG_CHARGER_RT9455 is not set +CONFIG_POWER_RESET=y +# CONFIG_POWER_RESET_GPIO is not set +# CONFIG_POWER_RESET_GPIO_RESTART is not set +# CONFIG_POWER_RESET_HISI is not set +# CONFIG_POWER_RESET_MSM is not set +# CONFIG_POWER_RESET_LTC2952 is not set +# 
CONFIG_POWER_RESET_RESTART is not set +CONFIG_POWER_RESET_VEXPRESS=y +CONFIG_POWER_RESET_XGENE=y +CONFIG_POWER_RESET_SYSCON=y +# CONFIG_POWER_RESET_SYSCON_POWEROFF is not set +# CONFIG_POWER_AVS is not set +# CONFIG_HWMON is not set +# CONFIG_THERMAL is not set +# CONFIG_WATCHDOG is not set +CONFIG_SSB_POSSIBLE=y + +# +# Sonics Silicon Backplane +# +# CONFIG_SSB is not set +CONFIG_BCMA_POSSIBLE=y + +# +# Broadcom specific AMBA +# +# CONFIG_BCMA is not set + +# +# Multifunction device drivers +# +CONFIG_MFD_CORE=y +# CONFIG_MFD_AS3711 is not set +# CONFIG_MFD_AS3722 is not set +# CONFIG_PMIC_ADP5520 is not set +# CONFIG_MFD_AAT2870_CORE is not set +# CONFIG_MFD_ATMEL_FLEXCOM is not set +# CONFIG_MFD_ATMEL_HLCDC is not set +# CONFIG_MFD_BCM590XX is not set +# CONFIG_MFD_AXP20X is not set +# CONFIG_PMIC_DA903X is not set +# CONFIG_MFD_DA9052_SPI is not set +# CONFIG_MFD_DA9052_I2C is not set +# CONFIG_MFD_DA9055 is not set +# CONFIG_MFD_DA9062 is not set +# CONFIG_MFD_DA9063 is not set +# CONFIG_MFD_DA9150 is not set +# CONFIG_MFD_DLN2 is not set +# CONFIG_MFD_MC13XXX_SPI is not set +# CONFIG_MFD_MC13XXX_I2C is not set +# CONFIG_MFD_HI6421_PMIC is not set +# CONFIG_HTC_PASIC3 is not set +# CONFIG_HTC_I2CPLD is not set +# CONFIG_LPC_ICH is not set +# CONFIG_LPC_SCH is not set +# CONFIG_INTEL_SOC_PMIC is not set +# CONFIG_MFD_JANZ_CMODIO is not set +# CONFIG_MFD_KEMPLD is not set +# CONFIG_MFD_88PM800 is not set +# CONFIG_MFD_88PM805 is not set +# CONFIG_MFD_88PM860X is not set +# CONFIG_MFD_MAX14577 is not set +# CONFIG_MFD_MAX77686 is not set +# CONFIG_MFD_MAX77693 is not set +# CONFIG_MFD_MAX77843 is not set +# CONFIG_MFD_MAX8907 is not set +# CONFIG_MFD_MAX8925 is not set +# CONFIG_MFD_MAX8997 is not set +# CONFIG_MFD_MAX8998 is not set +# CONFIG_MFD_MT6397 is not set +# CONFIG_MFD_MENF21BMC is not set +# CONFIG_EZX_PCAP is not set +# CONFIG_MFD_VIPERBOARD is not set +# CONFIG_MFD_RETU is not set +# CONFIG_MFD_PCF50633 is not set +# CONFIG_MFD_QCOM_RPM is not set +# CONFIG_MFD_RDC321X is not set +# CONFIG_MFD_RTSX_PCI is not set +# CONFIG_MFD_RT5033 is not set +# CONFIG_MFD_RTSX_USB is not set +# CONFIG_MFD_RC5T583 is not set +# CONFIG_MFD_RK808 is not set +# CONFIG_MFD_RN5T618 is not set +# CONFIG_MFD_SEC_CORE is not set +# CONFIG_MFD_SI476X_CORE is not set +# CONFIG_MFD_SM501 is not set +# CONFIG_MFD_SKY81452 is not set +# CONFIG_MFD_SMSC is not set +# CONFIG_ABX500_CORE is not set +# CONFIG_MFD_STMPE is not set +CONFIG_MFD_SYSCON=y +# CONFIG_MFD_TI_AM335X_TSCADC is not set +# CONFIG_MFD_LP3943 is not set +# CONFIG_MFD_LP8788 is not set +# CONFIG_MFD_PALMAS is not set +# CONFIG_TPS6105X is not set +# CONFIG_TPS65010 is not set +# CONFIG_TPS6507X is not set +# CONFIG_MFD_TPS65090 is not set +# CONFIG_MFD_TPS65217 is not set +# CONFIG_MFD_TPS65218 is not set +# CONFIG_MFD_TPS6586X is not set +# CONFIG_MFD_TPS65910 is not set +# CONFIG_MFD_TPS65912 is not set +# CONFIG_MFD_TPS65912_I2C is not set +# CONFIG_MFD_TPS65912_SPI is not set +# CONFIG_MFD_TPS80031 is not set +# CONFIG_TWL4030_CORE is not set +# CONFIG_TWL6040_CORE is not set +# CONFIG_MFD_WL1273_CORE is not set +# CONFIG_MFD_LM3533 is not set +# CONFIG_MFD_TC3589X is not set +# CONFIG_MFD_TMIO is not set +# CONFIG_MFD_VX855 is not set +# CONFIG_MFD_ARIZONA_I2C is not set +# CONFIG_MFD_ARIZONA_SPI is not set +# CONFIG_MFD_WM8400 is not set +# CONFIG_MFD_WM831X_I2C is not set +# CONFIG_MFD_WM831X_SPI is not set +# CONFIG_MFD_WM8350_I2C is not set +# CONFIG_MFD_WM8994 is not set +CONFIG_MFD_VEXPRESS_SYSREG=y +CONFIG_REGULATOR=y +# 
CONFIG_REGULATOR_DEBUG is not set +CONFIG_REGULATOR_FIXED_VOLTAGE=y +# CONFIG_REGULATOR_VIRTUAL_CONSUMER is not set +# CONFIG_REGULATOR_USERSPACE_CONSUMER is not set +# CONFIG_REGULATOR_ACT8865 is not set +# CONFIG_REGULATOR_AD5398 is not set +# CONFIG_REGULATOR_ANATOP is not set +# CONFIG_REGULATOR_DA9210 is not set +# CONFIG_REGULATOR_DA9211 is not set +# CONFIG_REGULATOR_FAN53555 is not set +# CONFIG_REGULATOR_GPIO is not set +# CONFIG_REGULATOR_ISL9305 is not set +# CONFIG_REGULATOR_ISL6271A is not set +# CONFIG_REGULATOR_LP3971 is not set +# CONFIG_REGULATOR_LP3972 is not set +# CONFIG_REGULATOR_LP872X is not set +# CONFIG_REGULATOR_LP8755 is not set +# CONFIG_REGULATOR_LTC3589 is not set +# CONFIG_REGULATOR_MAX1586 is not set +# CONFIG_REGULATOR_MAX8649 is not set +# CONFIG_REGULATOR_MAX8660 is not set +# CONFIG_REGULATOR_MAX8952 is not set +# CONFIG_REGULATOR_MAX8973 is not set +# CONFIG_REGULATOR_MT6311 is not set +# CONFIG_REGULATOR_PFUZE100 is not set +CONFIG_REGULATOR_QCOM_SMD_RPM=y +# CONFIG_REGULATOR_TPS51632 is not set +# CONFIG_REGULATOR_TPS62360 is not set +# CONFIG_REGULATOR_TPS65023 is not set +# CONFIG_REGULATOR_TPS6507X is not set +# CONFIG_REGULATOR_TPS6524X is not set +# CONFIG_REGULATOR_VEXPRESS is not set +# CONFIG_MEDIA_SUPPORT is not set + +# +# Graphics support +# +CONFIG_VGA_ARB=y +CONFIG_VGA_ARB_MAX_GPUS=16 +# CONFIG_TEGRA_HOST1X is not set +# CONFIG_DRM is not set + +# +# Frame buffer Devices +# +CONFIG_FB=y +# CONFIG_FIRMWARE_EDID is not set +CONFIG_FB_CMDLINE=y +# CONFIG_FB_DDC is not set +# CONFIG_FB_BOOT_VESA_SUPPORT is not set +CONFIG_FB_CFB_FILLRECT=y +CONFIG_FB_CFB_COPYAREA=y +CONFIG_FB_CFB_IMAGEBLIT=y +# CONFIG_FB_CFB_REV_PIXELS_IN_BYTE is not set +# CONFIG_FB_SYS_FILLRECT is not set +# CONFIG_FB_SYS_COPYAREA is not set +# CONFIG_FB_SYS_IMAGEBLIT is not set +# CONFIG_FB_FOREIGN_ENDIAN is not set +# CONFIG_FB_SYS_FOPS is not set +# CONFIG_FB_SVGALIB is not set +# CONFIG_FB_MACMODES is not set +# CONFIG_FB_BACKLIGHT is not set +CONFIG_FB_MODE_HELPERS=y +# CONFIG_FB_TILEBLITTING is not set + +# +# Frame buffer hardware drivers +# +# CONFIG_FB_CIRRUS is not set +# CONFIG_FB_PM2 is not set +CONFIG_FB_ARMCLCD=y +# CONFIG_FB_CYBER2000 is not set +# CONFIG_FB_ASILIANT is not set +# CONFIG_FB_IMSTT is not set +# CONFIG_FB_OPENCORES is not set +# CONFIG_FB_S1D13XXX is not set +# CONFIG_FB_NVIDIA is not set +# CONFIG_FB_RIVA is not set +# CONFIG_FB_I740 is not set +# CONFIG_FB_MATROX is not set +# CONFIG_FB_RADEON is not set +# CONFIG_FB_ATY128 is not set +# CONFIG_FB_ATY is not set +# CONFIG_FB_S3 is not set +# CONFIG_FB_SAVAGE is not set +# CONFIG_FB_SIS is not set +# CONFIG_FB_NEOMAGIC is not set +# CONFIG_FB_KYRO is not set +# CONFIG_FB_3DFX is not set +# CONFIG_FB_VOODOO1 is not set +# CONFIG_FB_VT8623 is not set +# CONFIG_FB_TRIDENT is not set +# CONFIG_FB_ARK is not set +# CONFIG_FB_PM3 is not set +# CONFIG_FB_CARMINE is not set +# CONFIG_FB_S3C is not set +# CONFIG_FB_SMSCUFX is not set +# CONFIG_FB_UDL is not set +# CONFIG_FB_IBM_GXT4500 is not set +# CONFIG_FB_VIRTUAL is not set +# CONFIG_FB_METRONOME is not set +# CONFIG_FB_MB862XX is not set +# CONFIG_FB_BROADSHEET is not set +# CONFIG_FB_AUO_K190X is not set +# CONFIG_FB_SIMPLE is not set +# CONFIG_EXYNOS_VIDEO is not set +# CONFIG_FB_SSD1307 is not set +# CONFIG_FB_SM712 is not set +# CONFIG_BACKLIGHT_LCD_SUPPORT is not set +# CONFIG_VGASTATE is not set +CONFIG_VIDEOMODE_HELPERS=y + +# +# Console display driver support +# +CONFIG_DUMMY_CONSOLE=y +CONFIG_DUMMY_CONSOLE_COLUMNS=80 
+CONFIG_DUMMY_CONSOLE_ROWS=25 +CONFIG_FRAMEBUFFER_CONSOLE=y +# CONFIG_FRAMEBUFFER_CONSOLE_DETECT_PRIMARY is not set +# CONFIG_FRAMEBUFFER_CONSOLE_ROTATION is not set +CONFIG_LOGO=y +# CONFIG_LOGO_LINUX_MONO is not set +# CONFIG_LOGO_LINUX_VGA16 is not set +CONFIG_LOGO_LINUX_CLUT224=y +# CONFIG_SOUND is not set + +# +# HID support +# +CONFIG_HID=y +# CONFIG_HID_BATTERY_STRENGTH is not set +# CONFIG_HIDRAW is not set +# CONFIG_UHID is not set +CONFIG_HID_GENERIC=y + +# +# Special HID drivers +# +CONFIG_HID_A4TECH=y +# CONFIG_HID_ACRUX is not set +CONFIG_HID_APPLE=y +# CONFIG_HID_APPLEIR is not set +# CONFIG_HID_AUREAL is not set +CONFIG_HID_BELKIN=y +# CONFIG_HID_BETOP_FF is not set +CONFIG_HID_CHERRY=y +CONFIG_HID_CHICONY=y +# CONFIG_HID_CORSAIR is not set +# CONFIG_HID_CP2112 is not set +CONFIG_HID_CYPRESS=y +# CONFIG_HID_DRAGONRISE is not set +# CONFIG_HID_EMS_FF is not set +# CONFIG_HID_ELECOM is not set +# CONFIG_HID_ELO is not set +CONFIG_HID_EZKEY=y +# CONFIG_HID_GEMBIRD is not set +# CONFIG_HID_GFRM is not set +# CONFIG_HID_HOLTEK is not set +# CONFIG_HID_GT683R is not set +# CONFIG_HID_KEYTOUCH is not set +# CONFIG_HID_KYE is not set +# CONFIG_HID_UCLOGIC is not set +# CONFIG_HID_WALTOP is not set +# CONFIG_HID_GYRATION is not set +# CONFIG_HID_ICADE is not set +# CONFIG_HID_TWINHAN is not set +CONFIG_HID_KENSINGTON=y +# CONFIG_HID_LCPOWER is not set +# CONFIG_HID_LENOVO is not set +CONFIG_HID_LOGITECH=y +# CONFIG_HID_LOGITECH_HIDPP is not set +# CONFIG_LOGITECH_FF is not set +# CONFIG_LOGIRUMBLEPAD2_FF is not set +# CONFIG_LOGIG940_FF is not set +# CONFIG_LOGIWHEELS_FF is not set +# CONFIG_HID_MAGICMOUSE is not set +CONFIG_HID_MICROSOFT=y +CONFIG_HID_MONTEREY=y +# CONFIG_HID_MULTITOUCH is not set +# CONFIG_HID_NTRIG is not set +# CONFIG_HID_ORTEK is not set +# CONFIG_HID_PANTHERLORD is not set +# CONFIG_HID_PENMOUNT is not set +# CONFIG_HID_PETALYNX is not set +# CONFIG_HID_PICOLCD is not set +# CONFIG_HID_PLANTRONICS is not set +# CONFIG_HID_PRIMAX is not set +# CONFIG_HID_ROCCAT is not set +# CONFIG_HID_SAITEK is not set +# CONFIG_HID_SAMSUNG is not set +# CONFIG_HID_SONY is not set +# CONFIG_HID_SPEEDLINK is not set +# CONFIG_HID_STEELSERIES is not set +# CONFIG_HID_SUNPLUS is not set +# CONFIG_HID_RMI is not set +# CONFIG_HID_GREENASIA is not set +# CONFIG_HID_SMARTJOYPLUS is not set +# CONFIG_HID_TIVO is not set +# CONFIG_HID_TOPSEED is not set +# CONFIG_HID_THINGM is not set +# CONFIG_HID_THRUSTMASTER is not set +# CONFIG_HID_WACOM is not set +# CONFIG_HID_WIIMOTE is not set +# CONFIG_HID_XINMO is not set +# CONFIG_HID_ZEROPLUS is not set +# CONFIG_HID_ZYDACRON is not set +# CONFIG_HID_SENSOR_HUB is not set + +# +# USB HID support +# +CONFIG_USB_HID=y +# CONFIG_HID_PID is not set +# CONFIG_USB_HIDDEV is not set + +# +# I2C HID support +# +# CONFIG_I2C_HID is not set +CONFIG_USB_OHCI_LITTLE_ENDIAN=y +CONFIG_USB_SUPPORT=y +CONFIG_USB_COMMON=y +CONFIG_USB_ARCH_HAS_HCD=y +CONFIG_USB=y +# CONFIG_USB_ANNOUNCE_NEW_DEVICES is not set + +# +# Miscellaneous USB options +# +CONFIG_USB_DEFAULT_PERSIST=y +# CONFIG_USB_DYNAMIC_MINORS is not set +# CONFIG_USB_OTG is not set +# CONFIG_USB_OTG_WHITELIST is not set +# CONFIG_USB_ULPI_BUS is not set +# CONFIG_USB_MON is not set +# CONFIG_USB_WUSB_CBAF is not set + +# +# USB Host Controller Drivers +# +# CONFIG_USB_C67X00_HCD is not set +# CONFIG_USB_XHCI_HCD is not set +CONFIG_USB_EHCI_HCD=y +# CONFIG_USB_EHCI_ROOT_HUB_TT is not set +CONFIG_USB_EHCI_TT_NEWSCHED=y +CONFIG_USB_EHCI_PCI=y +# CONFIG_USB_EHCI_MSM is not set +# CONFIG_USB_EHCI_TEGRA 
is not set +# CONFIG_USB_EHCI_EXYNOS is not set +CONFIG_USB_EHCI_HCD_PLATFORM=y +# CONFIG_USB_OXU210HP_HCD is not set +# CONFIG_USB_ISP116X_HCD is not set +# CONFIG_USB_ISP1362_HCD is not set +# CONFIG_USB_FOTG210_HCD is not set +# CONFIG_USB_MAX3421_HCD is not set +CONFIG_USB_OHCI_HCD=y +CONFIG_USB_OHCI_HCD_PCI=y +# CONFIG_USB_OHCI_EXYNOS is not set +CONFIG_USB_OHCI_HCD_PLATFORM=y +# CONFIG_USB_UHCI_HCD is not set +# CONFIG_USB_SL811_HCD is not set +# CONFIG_USB_R8A66597_HCD is not set +# CONFIG_USB_HCD_TEST_MODE is not set + +# +# USB Device Class drivers +# +# CONFIG_USB_ACM is not set +# CONFIG_USB_PRINTER is not set +# CONFIG_USB_WDM is not set +# CONFIG_USB_TMC is not set + +# +# NOTE: USB_STORAGE depends on SCSI but BLK_DEV_SD may +# + +# +# also be needed; see USB_STORAGE Help for more info +# +CONFIG_USB_STORAGE=y +# CONFIG_USB_STORAGE_DEBUG is not set +# CONFIG_USB_STORAGE_REALTEK is not set +# CONFIG_USB_STORAGE_DATAFAB is not set +# CONFIG_USB_STORAGE_FREECOM is not set +# CONFIG_USB_STORAGE_ISD200 is not set +# CONFIG_USB_STORAGE_USBAT is not set +# CONFIG_USB_STORAGE_SDDR09 is not set +# CONFIG_USB_STORAGE_SDDR55 is not set +# CONFIG_USB_STORAGE_JUMPSHOT is not set +# CONFIG_USB_STORAGE_ALAUDA is not set +# CONFIG_USB_STORAGE_ONETOUCH is not set +# CONFIG_USB_STORAGE_KARMA is not set +# CONFIG_USB_STORAGE_CYPRESS_ATACB is not set +# CONFIG_USB_STORAGE_ENE_UB6250 is not set +# CONFIG_USB_UAS is not set + +# +# USB Imaging devices +# +# CONFIG_USB_MDC800 is not set +# CONFIG_USB_MICROTEK is not set +# CONFIG_USBIP_CORE is not set +# CONFIG_USB_MUSB_HDRC is not set +# CONFIG_USB_DWC3 is not set +# CONFIG_USB_DWC2 is not set +# CONFIG_USB_CHIPIDEA is not set +CONFIG_USB_ISP1760=y +CONFIG_USB_ISP1760_HCD=y +CONFIG_USB_ISP1760_HOST_ROLE=y + +# +# USB port drivers +# +# CONFIG_USB_SERIAL is not set + +# +# USB Miscellaneous drivers +# +# CONFIG_USB_EMI62 is not set +# CONFIG_USB_EMI26 is not set +# CONFIG_USB_ADUTUX is not set +# CONFIG_USB_SEVSEG is not set +# CONFIG_USB_RIO500 is not set +# CONFIG_USB_LEGOTOWER is not set +# CONFIG_USB_LCD is not set +# CONFIG_USB_LED is not set +# CONFIG_USB_CYPRESS_CY7C63 is not set +# CONFIG_USB_CYTHERM is not set +# CONFIG_USB_IDMOUSE is not set +# CONFIG_USB_FTDI_ELAN is not set +# CONFIG_USB_APPLEDISPLAY is not set +# CONFIG_USB_SISUSBVGA is not set +# CONFIG_USB_LD is not set +# CONFIG_USB_TRANCEVIBRATOR is not set +# CONFIG_USB_IOWARRIOR is not set +# CONFIG_USB_TEST is not set +# CONFIG_USB_EHSET_TEST_FIXTURE is not set +# CONFIG_USB_ISIGHTFW is not set +# CONFIG_USB_YUREX is not set +# CONFIG_USB_EZUSB_FX2 is not set +# CONFIG_USB_HSIC_USB3503 is not set +# CONFIG_USB_LINK_LAYER_TEST is not set + +# +# USB Physical Layer drivers +# +# CONFIG_USB_PHY is not set +# CONFIG_NOP_USB_XCEIV is not set +# CONFIG_USB_GPIO_VBUS is not set +# CONFIG_USB_ISP1301 is not set +CONFIG_USB_ULPI=y +CONFIG_USB_ULPI_VIEWPORT=y +# CONFIG_USB_GADGET is not set +# CONFIG_USB_LED_TRIG is not set +# CONFIG_UWB is not set +CONFIG_MMC=y +# CONFIG_MMC_DEBUG is not set + +# +# MMC/SD/SDIO Card Drivers +# +CONFIG_MMC_BLOCK=y +CONFIG_MMC_BLOCK_MINORS=8 +CONFIG_MMC_BLOCK_BOUNCE=y +# CONFIG_SDIO_UART is not set +# CONFIG_MMC_TEST is not set + +# +# MMC/SD/SDIO Host Controller Drivers +# +CONFIG_MMC_ARMMMCI=y +CONFIG_MMC_QCOM_DML=y +CONFIG_MMC_SDHCI=y +CONFIG_MMC_SDHCI_IO_ACCESSORS=y +# CONFIG_MMC_SDHCI_PCI is not set +CONFIG_MMC_SDHCI_PLTFM=y +# CONFIG_MMC_SDHCI_OF_ARASAN is not set +# CONFIG_MMC_SDHCI_OF_AT91 is not set +# CONFIG_MMC_SDHCI_OF_ESDHC is not set +# 
CONFIG_MMC_SDHCI_TEGRA is not set +# CONFIG_MMC_SDHCI_PXAV3 is not set +# CONFIG_MMC_SDHCI_F_SDH30 is not set +CONFIG_MMC_SDHCI_IPROC=y +# CONFIG_MMC_SDHCI_MSM is not set +# CONFIG_MMC_TIFM_SD is not set +CONFIG_MMC_SPI=y +# CONFIG_MMC_CB710 is not set +# CONFIG_MMC_VIA_SDMMC is not set +CONFIG_MMC_DW=y +CONFIG_MMC_DW_PLTFM=y +CONFIG_MMC_DW_EXYNOS=y +# CONFIG_MMC_DW_K3 is not set +# CONFIG_MMC_DW_PCI is not set +# CONFIG_MMC_DW_ROCKCHIP is not set +# CONFIG_MMC_VUB300 is not set +# CONFIG_MMC_USHC is not set +# CONFIG_MMC_USDHI6ROL0 is not set +# CONFIG_MMC_TOSHIBA_PCI is not set +# CONFIG_MMC_MTK is not set +# CONFIG_MEMSTICK is not set +CONFIG_NEW_LEDS=y +CONFIG_LEDS_CLASS=y +# CONFIG_LEDS_CLASS_FLASH is not set + +# +# LED drivers +# +# CONFIG_LEDS_BCM6328 is not set +# CONFIG_LEDS_BCM6358 is not set +# CONFIG_LEDS_LM3530 is not set +# CONFIG_LEDS_LM3642 is not set +# CONFIG_LEDS_PCA9532 is not set +# CONFIG_LEDS_GPIO is not set +# CONFIG_LEDS_LP3944 is not set +# CONFIG_LEDS_LP5521 is not set +# CONFIG_LEDS_LP5523 is not set +# CONFIG_LEDS_LP5562 is not set +# CONFIG_LEDS_LP8501 is not set +# CONFIG_LEDS_LP8860 is not set +# CONFIG_LEDS_PCA955X is not set +# CONFIG_LEDS_PCA963X is not set +# CONFIG_LEDS_DAC124S085 is not set +# CONFIG_LEDS_REGULATOR is not set +# CONFIG_LEDS_BD2802 is not set +# CONFIG_LEDS_INTEL_SS4200 is not set +# CONFIG_LEDS_LT3593 is not set +# CONFIG_LEDS_TCA6507 is not set +# CONFIG_LEDS_TLC591XX is not set +# CONFIG_LEDS_LM355x is not set + +# +# LED driver for blink(1) USB RGB LED is under Special HID drivers (HID_THINGM) +# +# CONFIG_LEDS_BLINKM is not set +CONFIG_LEDS_SYSCON=y + +# +# LED Triggers +# +CONFIG_LEDS_TRIGGERS=y +# CONFIG_LEDS_TRIGGER_TIMER is not set +# CONFIG_LEDS_TRIGGER_ONESHOT is not set +CONFIG_LEDS_TRIGGER_HEARTBEAT=y +# CONFIG_LEDS_TRIGGER_BACKLIGHT is not set +CONFIG_LEDS_TRIGGER_CPU=y +# CONFIG_LEDS_TRIGGER_GPIO is not set +# CONFIG_LEDS_TRIGGER_DEFAULT_ON is not set + +# +# iptables trigger is under Netfilter config (LED target) +# +# CONFIG_LEDS_TRIGGER_TRANSIENT is not set +# CONFIG_LEDS_TRIGGER_CAMERA is not set +# CONFIG_ACCESSIBILITY is not set +# CONFIG_INFINIBAND is not set +CONFIG_EDAC_SUPPORT=y +# CONFIG_EDAC is not set +CONFIG_RTC_LIB=y +CONFIG_RTC_CLASS=y +CONFIG_RTC_HCTOSYS=y +CONFIG_RTC_HCTOSYS_DEVICE="rtc0" +CONFIG_RTC_SYSTOHC=y +CONFIG_RTC_SYSTOHC_DEVICE="rtc0" +# CONFIG_RTC_DEBUG is not set + +# +# RTC interfaces +# +CONFIG_RTC_INTF_SYSFS=y +CONFIG_RTC_INTF_PROC=y +CONFIG_RTC_INTF_DEV=y +# CONFIG_RTC_INTF_DEV_UIE_EMUL is not set +# CONFIG_RTC_DRV_TEST is not set + +# +# I2C RTC drivers +# +# CONFIG_RTC_DRV_ABB5ZES3 is not set +# CONFIG_RTC_DRV_ABX80X is not set +# CONFIG_RTC_DRV_DS1307 is not set +# CONFIG_RTC_DRV_DS1374 is not set +# CONFIG_RTC_DRV_DS1672 is not set +# CONFIG_RTC_DRV_DS3232 is not set +# CONFIG_RTC_DRV_HYM8563 is not set +# CONFIG_RTC_DRV_MAX6900 is not set +# CONFIG_RTC_DRV_RS5C372 is not set +# CONFIG_RTC_DRV_ISL1208 is not set +# CONFIG_RTC_DRV_ISL12022 is not set +# CONFIG_RTC_DRV_ISL12057 is not set +# CONFIG_RTC_DRV_X1205 is not set +# CONFIG_RTC_DRV_PCF2127 is not set +# CONFIG_RTC_DRV_PCF8523 is not set +# CONFIG_RTC_DRV_PCF8563 is not set +# CONFIG_RTC_DRV_PCF85063 is not set +# CONFIG_RTC_DRV_PCF8583 is not set +# CONFIG_RTC_DRV_M41T80 is not set +# CONFIG_RTC_DRV_BQ32K is not set +# CONFIG_RTC_DRV_S35390A is not set +# CONFIG_RTC_DRV_FM3130 is not set +# CONFIG_RTC_DRV_RX8581 is not set +# CONFIG_RTC_DRV_RX8025 is not set +# CONFIG_RTC_DRV_EM3027 is not set +# CONFIG_RTC_DRV_RV3029C2 is not 
set +# CONFIG_RTC_DRV_RV8803 is not set + +# +# SPI RTC drivers +# +# CONFIG_RTC_DRV_M41T93 is not set +# CONFIG_RTC_DRV_M41T94 is not set +# CONFIG_RTC_DRV_DS1305 is not set +# CONFIG_RTC_DRV_DS1343 is not set +# CONFIG_RTC_DRV_DS1347 is not set +# CONFIG_RTC_DRV_DS1390 is not set +# CONFIG_RTC_DRV_MAX6902 is not set +# CONFIG_RTC_DRV_R9701 is not set +# CONFIG_RTC_DRV_RS5C348 is not set +# CONFIG_RTC_DRV_DS3234 is not set +# CONFIG_RTC_DRV_PCF2123 is not set +# CONFIG_RTC_DRV_RX4581 is not set +# CONFIG_RTC_DRV_MCP795 is not set + +# +# Platform RTC drivers +# +# CONFIG_RTC_DRV_DS1286 is not set +# CONFIG_RTC_DRV_DS1511 is not set +# CONFIG_RTC_DRV_DS1553 is not set +# CONFIG_RTC_DRV_DS1685_FAMILY is not set +# CONFIG_RTC_DRV_DS1742 is not set +# CONFIG_RTC_DRV_DS2404 is not set +CONFIG_RTC_DRV_EFI=y +# CONFIG_RTC_DRV_STK17TA8 is not set +# CONFIG_RTC_DRV_M48T86 is not set +# CONFIG_RTC_DRV_M48T35 is not set +# CONFIG_RTC_DRV_M48T59 is not set +# CONFIG_RTC_DRV_MSM6242 is not set +# CONFIG_RTC_DRV_BQ4802 is not set +# CONFIG_RTC_DRV_RP5C01 is not set +# CONFIG_RTC_DRV_V3020 is not set +# CONFIG_RTC_DRV_ZYNQMP is not set + +# +# on-CPU RTC drivers +# +CONFIG_HAVE_S3C_RTC=y +# CONFIG_RTC_DRV_S3C is not set +# CONFIG_RTC_DRV_PL030 is not set +# CONFIG_RTC_DRV_PL031 is not set +# CONFIG_RTC_DRV_TEGRA is not set +# CONFIG_RTC_DRV_SNVS is not set +CONFIG_RTC_DRV_XGENE=y + +# +# HID Sensor RTC drivers +# +# CONFIG_RTC_DRV_HID_SENSOR_TIME is not set +CONFIG_DMADEVICES=y +# CONFIG_DMADEVICES_DEBUG is not set + +# +# DMA Devices +# +CONFIG_DMA_ENGINE=y +CONFIG_DMA_VIRTUAL_CHANNELS=y +CONFIG_DMA_OF=y +# CONFIG_AMBA_PL08X is not set +# CONFIG_FSL_EDMA is not set +# CONFIG_INTEL_IDMA64 is not set +# CONFIG_PL330_DMA is not set +CONFIG_QCOM_BAM_DMA=y +# CONFIG_TEGRA20_APB_DMA is not set +# CONFIG_XGENE_DMA is not set +# CONFIG_DW_DMAC is not set +# CONFIG_DW_DMAC_PCI is not set + +# +# DMA Clients +# +# CONFIG_ASYNC_TX_DMA is not set +# CONFIG_DMATEST is not set +# CONFIG_AUXDISPLAY is not set +# CONFIG_UIO is not set +# CONFIG_VIRT_DRIVERS is not set +CONFIG_VIRTIO=y + +# +# Virtio drivers +# +CONFIG_VIRTIO_PCI=y +CONFIG_VIRTIO_PCI_LEGACY=y +CONFIG_VIRTIO_BALLOON=y +# CONFIG_VIRTIO_INPUT is not set +CONFIG_VIRTIO_MMIO=y +# CONFIG_VIRTIO_MMIO_CMDLINE_DEVICES is not set + +# +# Microsoft Hyper-V guest support +# +# CONFIG_STAGING is not set +# CONFIG_CHROME_PLATFORMS is not set +CONFIG_CLKDEV_LOOKUP=y +CONFIG_HAVE_CLK_PREPARE=y +CONFIG_COMMON_CLK=y + +# +# Common Clock Framework +# +CONFIG_COMMON_CLK_VERSATILE=y +CONFIG_CLK_SP810=y +CONFIG_CLK_VEXPRESS_OSC=y +# CONFIG_COMMON_CLK_SI5351 is not set +# CONFIG_COMMON_CLK_SI514 is not set +# CONFIG_COMMON_CLK_SI570 is not set +# CONFIG_COMMON_CLK_CDCE925 is not set +# CONFIG_CLK_QORIQ is not set +CONFIG_COMMON_CLK_XGENE=y +# CONFIG_COMMON_CLK_PXA is not set +# CONFIG_COMMON_CLK_CDCE706 is not set +CONFIG_COMMON_CLK_HI6220=y +CONFIG_QCOM_GDSC=y +CONFIG_COMMON_CLK_QCOM=y +# CONFIG_APQ_GCC_8084 is not set +# CONFIG_APQ_MMCC_8084 is not set +# CONFIG_IPQ_GCC_806X is not set +# CONFIG_IPQ_LCC_806X is not set +# CONFIG_MSM_GCC_8660 is not set +CONFIG_MSM_GCC_8916=y +# CONFIG_MSM_GCC_8960 is not set +# CONFIG_MSM_LCC_8960 is not set +# CONFIG_MSM_MMCC_8960 is not set +# CONFIG_MSM_GCC_8974 is not set +# CONFIG_MSM_MMCC_8974 is not set +CONFIG_COMMON_CLK_SAMSUNG=y +CONFIG_HWSPINLOCK=y + +# +# Hardware Spinlock drivers +# +CONFIG_HWSPINLOCK_QCOM=y + +# +# Clock Source drivers +# +CONFIG_CLKSRC_OF=y +CONFIG_CLKSRC_PROBE=y +CONFIG_CLKSRC_MMIO=y 
+CONFIG_ARM_ARCH_TIMER=y +CONFIG_ARM_ARCH_TIMER_EVTSTREAM=y +# CONFIG_ARM_TIMER_SP804 is not set +# CONFIG_ATMEL_PIT is not set +# CONFIG_SH_TIMER_CMT is not set +# CONFIG_SH_TIMER_MTU2 is not set +# CONFIG_SH_TIMER_TMU is not set +# CONFIG_EM_TIMER_STI is not set +# CONFIG_MAILBOX is not set +# CONFIG_IOMMU_SUPPORT is not set + +# +# Remoteproc drivers +# +# CONFIG_STE_MODEM_RPROC is not set + +# +# Rpmsg drivers +# + +# +# SOC (System On Chip) specific Drivers +# +CONFIG_MTK_INFRACFG=y +# CONFIG_MTK_PMIC_WRAP is not set +CONFIG_MTK_SCPSYS=y +# CONFIG_QCOM_GSBI is not set +CONFIG_QCOM_SMEM=y +CONFIG_QCOM_SMD=y +CONFIG_QCOM_SMD_RPM=y +# CONFIG_ROCKCHIP_PM_DOMAINS is not set +# CONFIG_SUNXI_SRAM is not set +# CONFIG_SOC_TI is not set +# CONFIG_PM_DEVFREQ is not set +# CONFIG_EXTCON is not set +# CONFIG_MEMORY is not set +# CONFIG_IIO is not set +# CONFIG_NTB is not set +# CONFIG_VME_BUS is not set +# CONFIG_PWM is not set +CONFIG_IRQCHIP=y +CONFIG_ARM_GIC=y +CONFIG_ARM_GIC_V2M=y +CONFIG_ARM_GIC_V3=y +CONFIG_ARM_GIC_V3_ITS=y +CONFIG_DW_APB_ICTL=y +# CONFIG_IPACK_BUS is not set +CONFIG_ARCH_HAS_RESET_CONTROLLER=y +CONFIG_RESET_CONTROLLER=y +# CONFIG_FMC is not set + +# +# PHY Subsystem +# +CONFIG_GENERIC_PHY=y +# CONFIG_PHY_BERLIN_USB is not set +# CONFIG_PHY_BERLIN_SATA is not set +CONFIG_PHY_EXYNOS_MIPI_VIDEO=y +# CONFIG_PHY_PXA_28NM_HSIC is not set +# CONFIG_PHY_PXA_28NM_USB2 is not set +CONFIG_PHY_EXYNOS_DP_VIDEO=y +# CONFIG_BCM_KONA_USB2_PHY is not set +# CONFIG_PHY_MT65XX_USB3 is not set +# CONFIG_PHY_QCOM_APQ8064_SATA is not set +# CONFIG_PHY_QCOM_IPQ806X_SATA is not set +# CONFIG_PHY_ROCKCHIP_USB is not set +CONFIG_PHY_XGENE=y +# CONFIG_PHY_QCOM_UFS is not set +# CONFIG_POWERCAP is not set +# CONFIG_MCB is not set + +# +# Performance monitor support +# +CONFIG_ARM_PMU=y +CONFIG_RAS=y +# CONFIG_THUNDERBOLT is not set + +# +# Android +# +# CONFIG_ANDROID is not set +# CONFIG_LIBNVDIMM is not set +# CONFIG_NVMEM is not set +# CONFIG_STM is not set +# CONFIG_STM_DUMMY is not set +# CONFIG_STM_SOURCE_CONSOLE is not set +# CONFIG_INTEL_TH is not set + +# +# FPGA Configuration Support +# +# CONFIG_FPGA is not set + +# +# Firmware Drivers +# +CONFIG_ARM_PSCI_FW=y +# CONFIG_FIRMWARE_MEMMAP is not set +CONFIG_DMIID=y +# CONFIG_DMI_SYSFS is not set + +# +# EFI (Extensible Firmware Interface) Support +# +# CONFIG_EFI_VARS is not set +CONFIG_EFI_ESRT=y +CONFIG_EFI_PARAMS_FROM_FDT=y +CONFIG_EFI_RUNTIME_WRAPPERS=y +CONFIG_EFI_ARMSTUB=y + +# +# File systems +# +CONFIG_DCACHE_WORD_ACCESS=y +CONFIG_EXT2_FS=y +# CONFIG_EXT2_FS_XATTR is not set +CONFIG_EXT3_FS=y +# CONFIG_EXT3_FS_POSIX_ACL is not set +# CONFIG_EXT3_FS_SECURITY is not set +CONFIG_EXT4_FS=y +# CONFIG_EXT4_FS_POSIX_ACL is not set +# CONFIG_EXT4_FS_SECURITY is not set +# CONFIG_EXT4_ENCRYPTION is not set +# CONFIG_EXT4_DEBUG is not set +CONFIG_JBD2=y +# CONFIG_JBD2_DEBUG is not set +CONFIG_FS_MBCACHE=y +# CONFIG_REISERFS_FS is not set +# CONFIG_JFS_FS is not set +# CONFIG_XFS_FS is not set +# CONFIG_GFS2_FS is not set +# CONFIG_BTRFS_FS is not set +# CONFIG_NILFS2_FS is not set +# CONFIG_F2FS_FS is not set +# CONFIG_FS_DAX is not set +# CONFIG_FS_POSIX_ACL is not set +CONFIG_EXPORTFS=y +CONFIG_FILE_LOCKING=y +CONFIG_FSNOTIFY=y +CONFIG_DNOTIFY=y +CONFIG_INOTIFY_USER=y +CONFIG_FANOTIFY=y +CONFIG_FANOTIFY_ACCESS_PERMISSIONS=y +CONFIG_QUOTA=y +# CONFIG_QUOTA_NETLINK_INTERFACE is not set +CONFIG_PRINT_QUOTA_WARNING=y +# CONFIG_QUOTA_DEBUG is not set +# CONFIG_QFMT_V1 is not set +# CONFIG_QFMT_V2 is not set +CONFIG_QUOTACTL=y +CONFIG_AUTOFS4_FS=y 
+CONFIG_FUSE_FS=y +CONFIG_CUSE=y +# CONFIG_OVERLAY_FS is not set + +# +# Caches +# +# CONFIG_FSCACHE is not set + +# +# CD-ROM/DVD Filesystems +# +# CONFIG_ISO9660_FS is not set +# CONFIG_UDF_FS is not set + +# +# DOS/FAT/NT Filesystems +# +CONFIG_FAT_FS=y +# CONFIG_MSDOS_FS is not set +CONFIG_VFAT_FS=y +CONFIG_FAT_DEFAULT_CODEPAGE=437 +CONFIG_FAT_DEFAULT_IOCHARSET="iso8859-1" +# CONFIG_NTFS_FS is not set + +# +# Pseudo filesystems +# +CONFIG_PROC_FS=y +# CONFIG_PROC_KCORE is not set +CONFIG_PROC_SYSCTL=y +CONFIG_PROC_PAGE_MONITOR=y +# CONFIG_PROC_CHILDREN is not set +CONFIG_KERNFS=y +CONFIG_SYSFS=y +CONFIG_TMPFS=y +# CONFIG_TMPFS_POSIX_ACL is not set +# CONFIG_TMPFS_XATTR is not set +CONFIG_HUGETLBFS=y +CONFIG_HUGETLB_PAGE=y +# CONFIG_CONFIGFS_FS is not set +CONFIG_EFIVAR_FS=y +# CONFIG_MISC_FILESYSTEMS is not set +CONFIG_NETWORK_FILESYSTEMS=y +CONFIG_NFS_FS=y +CONFIG_NFS_V2=y +CONFIG_NFS_V3=y +# CONFIG_NFS_V3_ACL is not set +CONFIG_NFS_V4=y +# CONFIG_NFS_SWAP is not set +# CONFIG_NFS_V4_1 is not set +CONFIG_ROOT_NFS=y +# CONFIG_NFS_USE_LEGACY_DNS is not set +CONFIG_NFS_USE_KERNEL_DNS=y +# CONFIG_NFSD is not set +CONFIG_GRACE_PERIOD=y +CONFIG_LOCKD=y +CONFIG_LOCKD_V4=y +CONFIG_NFS_COMMON=y +CONFIG_SUNRPC=y +CONFIG_SUNRPC_GSS=y +# CONFIG_SUNRPC_DEBUG is not set +# CONFIG_CEPH_FS is not set +# CONFIG_CIFS is not set +# CONFIG_NCP_FS is not set +# CONFIG_CODA_FS is not set +# CONFIG_AFS_FS is not set +CONFIG_9P_FS=y +# CONFIG_9P_FS_POSIX_ACL is not set +# CONFIG_9P_FS_SECURITY is not set +CONFIG_NLS=y +CONFIG_NLS_DEFAULT="iso8859-1" +CONFIG_NLS_CODEPAGE_437=y +# CONFIG_NLS_CODEPAGE_737 is not set +# CONFIG_NLS_CODEPAGE_775 is not set +# CONFIG_NLS_CODEPAGE_850 is not set +# CONFIG_NLS_CODEPAGE_852 is not set +# CONFIG_NLS_CODEPAGE_855 is not set +# CONFIG_NLS_CODEPAGE_857 is not set +# CONFIG_NLS_CODEPAGE_860 is not set +# CONFIG_NLS_CODEPAGE_861 is not set +# CONFIG_NLS_CODEPAGE_862 is not set +# CONFIG_NLS_CODEPAGE_863 is not set +# CONFIG_NLS_CODEPAGE_864 is not set +# CONFIG_NLS_CODEPAGE_865 is not set +# CONFIG_NLS_CODEPAGE_866 is not set +# CONFIG_NLS_CODEPAGE_869 is not set +# CONFIG_NLS_CODEPAGE_936 is not set +# CONFIG_NLS_CODEPAGE_950 is not set +# CONFIG_NLS_CODEPAGE_932 is not set +# CONFIG_NLS_CODEPAGE_949 is not set +# CONFIG_NLS_CODEPAGE_874 is not set +# CONFIG_NLS_ISO8859_8 is not set +# CONFIG_NLS_CODEPAGE_1250 is not set +# CONFIG_NLS_CODEPAGE_1251 is not set +# CONFIG_NLS_ASCII is not set +CONFIG_NLS_ISO8859_1=y +# CONFIG_NLS_ISO8859_2 is not set +# CONFIG_NLS_ISO8859_3 is not set +# CONFIG_NLS_ISO8859_4 is not set +# CONFIG_NLS_ISO8859_5 is not set +# CONFIG_NLS_ISO8859_6 is not set +# CONFIG_NLS_ISO8859_7 is not set +# CONFIG_NLS_ISO8859_9 is not set +# CONFIG_NLS_ISO8859_13 is not set +# CONFIG_NLS_ISO8859_14 is not set +# CONFIG_NLS_ISO8859_15 is not set +# CONFIG_NLS_KOI8_R is not set +# CONFIG_NLS_KOI8_U is not set +# CONFIG_NLS_MAC_ROMAN is not set +# CONFIG_NLS_MAC_CELTIC is not set +# CONFIG_NLS_MAC_CENTEURO is not set +# CONFIG_NLS_MAC_CROATIAN is not set +# CONFIG_NLS_MAC_CYRILLIC is not set +# CONFIG_NLS_MAC_GAELIC is not set +# CONFIG_NLS_MAC_GREEK is not set +# CONFIG_NLS_MAC_ICELAND is not set +# CONFIG_NLS_MAC_INUIT is not set +# CONFIG_NLS_MAC_ROMANIAN is not set +# CONFIG_NLS_MAC_TURKISH is not set +# CONFIG_NLS_UTF8 is not set +CONFIG_HAVE_KVM_IRQFD=y +CONFIG_HAVE_KVM_EVENTFD=y +CONFIG_KVM_MMIO=y +CONFIG_HAVE_KVM_CPU_RELAX_INTERCEPT=y +CONFIG_KVM_VFIO=y +CONFIG_HAVE_KVM_ARCH_TLB_FLUSH_ALL=y +CONFIG_KVM_GENERIC_DIRTYLOG_READ_PROTECT=y 
+CONFIG_KVM_COMPAT=y +CONFIG_VIRTUALIZATION=y +CONFIG_KVM_ARM_VGIC_V3=y +CONFIG_KVM=y +CONFIG_KVM_ARM_HOST=y + +# +# Kernel hacking +# + +# +# printk and dmesg options +# +# CONFIG_PRINTK_TIME is not set +CONFIG_MESSAGE_LOGLEVEL_DEFAULT=4 +# CONFIG_BOOT_PRINTK_DELAY is not set +# CONFIG_DYNAMIC_DEBUG is not set + +# +# Compile-time checks and compiler options +# +CONFIG_DEBUG_INFO=y +# CONFIG_DEBUG_INFO_REDUCED is not set +# CONFIG_DEBUG_INFO_SPLIT is not set +# CONFIG_DEBUG_INFO_DWARF4 is not set +# CONFIG_GDB_SCRIPTS is not set +CONFIG_ENABLE_WARN_DEPRECATED=y +CONFIG_ENABLE_MUST_CHECK=y +CONFIG_FRAME_WARN=2048 +# CONFIG_STRIP_ASM_SYMS is not set +# CONFIG_READABLE_ASM is not set +# CONFIG_UNUSED_SYMBOLS is not set +# CONFIG_PAGE_OWNER is not set +CONFIG_DEBUG_FS=y +# CONFIG_HEADERS_CHECK is not set +# CONFIG_DEBUG_SECTION_MISMATCH is not set +CONFIG_SECTION_MISMATCH_WARN_ONLY=y +CONFIG_ARCH_WANT_FRAME_POINTERS=y +CONFIG_FRAME_POINTER=y +# CONFIG_DEBUG_FORCE_WEAK_PER_CPU is not set +CONFIG_MAGIC_SYSRQ=y +CONFIG_MAGIC_SYSRQ_DEFAULT_ENABLE=0x1 +CONFIG_DEBUG_KERNEL=y + +# +# Memory Debugging +# +# CONFIG_PAGE_EXTENSION is not set +# CONFIG_DEBUG_PAGEALLOC is not set +# CONFIG_DEBUG_OBJECTS is not set +# CONFIG_SLUB_DEBUG_ON is not set +# CONFIG_SLUB_STATS is not set +CONFIG_HAVE_DEBUG_KMEMLEAK=y +# CONFIG_DEBUG_KMEMLEAK is not set +# CONFIG_DEBUG_STACK_USAGE is not set +# CONFIG_DEBUG_VM is not set +CONFIG_DEBUG_MEMORY_INIT=y +# CONFIG_DEBUG_PER_CPU_MAPS is not set +CONFIG_HAVE_ARCH_KASAN=y +# CONFIG_KASAN is not set +# CONFIG_DEBUG_SHIRQ is not set + +# +# Debug Lockups and Hangs +# +CONFIG_LOCKUP_DETECTOR=y +# CONFIG_BOOTPARAM_SOFTLOCKUP_PANIC is not set +CONFIG_BOOTPARAM_SOFTLOCKUP_PANIC_VALUE=0 +CONFIG_DETECT_HUNG_TASK=y +CONFIG_DEFAULT_HUNG_TASK_TIMEOUT=120 +# CONFIG_BOOTPARAM_HUNG_TASK_PANIC is not set +CONFIG_BOOTPARAM_HUNG_TASK_PANIC_VALUE=0 +# CONFIG_PANIC_ON_OOPS is not set +CONFIG_PANIC_ON_OOPS_VALUE=0 +CONFIG_PANIC_TIMEOUT=0 +# CONFIG_SCHED_DEBUG is not set +CONFIG_SCHED_INFO=y +# CONFIG_SCHEDSTATS is not set +# CONFIG_SCHED_STACK_END_CHECK is not set +# CONFIG_DEBUG_TIMEKEEPING is not set +# CONFIG_TIMER_STATS is not set +# CONFIG_DEBUG_PREEMPT is not set + +# +# Lock Debugging (spinlocks, mutexes, etc...) 
+# +# CONFIG_DEBUG_RT_MUTEXES is not set +# CONFIG_DEBUG_SPINLOCK is not set +# CONFIG_DEBUG_MUTEXES is not set +# CONFIG_DEBUG_WW_MUTEX_SLOWPATH is not set +# CONFIG_DEBUG_LOCK_ALLOC is not set +# CONFIG_PROVE_LOCKING is not set +# CONFIG_LOCK_STAT is not set +# CONFIG_DEBUG_ATOMIC_SLEEP is not set +# CONFIG_DEBUG_LOCKING_API_SELFTESTS is not set +# CONFIG_LOCK_TORTURE_TEST is not set +# CONFIG_STACKTRACE is not set +# CONFIG_DEBUG_KOBJECT is not set +CONFIG_HAVE_DEBUG_BUGVERBOSE=y +CONFIG_DEBUG_BUGVERBOSE=y +# CONFIG_DEBUG_LIST is not set +# CONFIG_DEBUG_PI_LIST is not set +# CONFIG_DEBUG_SG is not set +# CONFIG_DEBUG_NOTIFIERS is not set +# CONFIG_DEBUG_CREDENTIALS is not set + +# +# RCU Debugging +# +# CONFIG_PROVE_RCU is not set +# CONFIG_SPARSE_RCU_POINTER is not set +# CONFIG_TORTURE_TEST is not set +# CONFIG_RCU_TORTURE_TEST is not set +CONFIG_RCU_CPU_STALL_TIMEOUT=21 +# CONFIG_RCU_TRACE is not set +# CONFIG_RCU_EQS_DEBUG is not set +# CONFIG_DEBUG_BLOCK_EXT_DEVT is not set +# CONFIG_NOTIFIER_ERROR_INJECTION is not set +# CONFIG_FAULT_INJECTION is not set +CONFIG_HAVE_FUNCTION_TRACER=y +CONFIG_HAVE_FUNCTION_GRAPH_TRACER=y +CONFIG_HAVE_DYNAMIC_FTRACE=y +CONFIG_HAVE_FTRACE_MCOUNT_RECORD=y +CONFIG_HAVE_SYSCALL_TRACEPOINTS=y +CONFIG_HAVE_C_RECORDMCOUNT=y +CONFIG_TRACING_SUPPORT=y +# CONFIG_FTRACE is not set + +# +# Runtime Testing +# +# CONFIG_LKDTM is not set +# CONFIG_TEST_LIST_SORT is not set +# CONFIG_BACKTRACE_SELF_TEST is not set +# CONFIG_RBTREE_TEST is not set +# CONFIG_INTERVAL_TREE_TEST is not set +# CONFIG_PERCPU_TEST is not set +# CONFIG_ATOMIC64_SELFTEST is not set +# CONFIG_TEST_HEXDUMP is not set +# CONFIG_TEST_STRING_HELPERS is not set +# CONFIG_TEST_KSTRTOX is not set +# CONFIG_TEST_PRINTF is not set +# CONFIG_TEST_RHASHTABLE is not set +# CONFIG_DMA_API_DEBUG is not set +# CONFIG_TEST_LKM is not set +# CONFIG_TEST_USER_COPY is not set +# CONFIG_TEST_BPF is not set +# CONFIG_TEST_FIRMWARE is not set +# CONFIG_TEST_UDELAY is not set +CONFIG_MEMTEST=y +# CONFIG_TEST_STATIC_KEYS is not set +# CONFIG_SAMPLES is not set +CONFIG_HAVE_ARCH_KGDB=y +# CONFIG_KGDB is not set +# CONFIG_ARM64_PTDUMP is not set +# CONFIG_STRICT_DEVMEM is not set +# CONFIG_PID_IN_CONTEXTIDR is not set +# CONFIG_ARM64_RANDOMIZE_TEXT_OFFSET is not set +# CONFIG_DEBUG_SET_MODULE_RONX is not set +# CONFIG_DEBUG_RODATA is not set +# CONFIG_CORESIGHT is not set + +# +# Security options +# +CONFIG_KEYS=y +# CONFIG_PERSISTENT_KEYRINGS is not set +# CONFIG_BIG_KEYS is not set +# CONFIG_ENCRYPTED_KEYS is not set +# CONFIG_SECURITY_DMESG_RESTRICT is not set +CONFIG_SECURITY=y +# CONFIG_SECURITYFS is not set +# CONFIG_SECURITY_NETWORK is not set +# CONFIG_SECURITY_PATH is not set +# CONFIG_SECURITY_SMACK is not set +# CONFIG_SECURITY_TOMOYO is not set +# CONFIG_SECURITY_APPARMOR is not set +# CONFIG_SECURITY_YAMA is not set +CONFIG_INTEGRITY=y +# CONFIG_INTEGRITY_SIGNATURE is not set +CONFIG_INTEGRITY_AUDIT=y +# CONFIG_IMA is not set +# CONFIG_EVM is not set +CONFIG_DEFAULT_SECURITY_DAC=y +CONFIG_DEFAULT_SECURITY="" +CONFIG_CRYPTO=y + +# +# Crypto core or helper +# +CONFIG_CRYPTO_ALGAPI=y +CONFIG_CRYPTO_ALGAPI2=y +CONFIG_CRYPTO_AEAD=y +CONFIG_CRYPTO_AEAD2=y +CONFIG_CRYPTO_BLKCIPHER=y +CONFIG_CRYPTO_BLKCIPHER2=y +CONFIG_CRYPTO_HASH=y +CONFIG_CRYPTO_HASH2=y +CONFIG_CRYPTO_RNG=y +CONFIG_CRYPTO_RNG2=y +CONFIG_CRYPTO_RNG_DEFAULT=m +CONFIG_CRYPTO_PCOMP2=y +CONFIG_CRYPTO_AKCIPHER2=y +# CONFIG_CRYPTO_RSA is not set +CONFIG_CRYPTO_MANAGER=y +CONFIG_CRYPTO_MANAGER2=y +# CONFIG_CRYPTO_USER is not set 
+CONFIG_CRYPTO_MANAGER_DISABLE_TESTS=y +# CONFIG_CRYPTO_GF128MUL is not set +CONFIG_CRYPTO_NULL=m +CONFIG_CRYPTO_NULL2=y +# CONFIG_CRYPTO_PCRYPT is not set +CONFIG_CRYPTO_WORKQUEUE=y +CONFIG_CRYPTO_CRYPTD=y +# CONFIG_CRYPTO_MCRYPTD is not set +# CONFIG_CRYPTO_AUTHENC is not set +# CONFIG_CRYPTO_TEST is not set +CONFIG_CRYPTO_ABLK_HELPER=y + +# +# Authenticated Encryption with Associated Data +# +# CONFIG_CRYPTO_CCM is not set +# CONFIG_CRYPTO_GCM is not set +# CONFIG_CRYPTO_CHACHA20POLY1305 is not set +# CONFIG_CRYPTO_SEQIV is not set +CONFIG_CRYPTO_ECHAINIV=m + +# +# Block modes +# +# CONFIG_CRYPTO_CBC is not set +# CONFIG_CRYPTO_CTR is not set +# CONFIG_CRYPTO_CTS is not set +# CONFIG_CRYPTO_ECB is not set +# CONFIG_CRYPTO_LRW is not set +# CONFIG_CRYPTO_PCBC is not set +# CONFIG_CRYPTO_XTS is not set +# CONFIG_CRYPTO_KEYWRAP is not set + +# +# Hash modes +# +# CONFIG_CRYPTO_CMAC is not set +CONFIG_CRYPTO_HMAC=m +# CONFIG_CRYPTO_XCBC is not set +# CONFIG_CRYPTO_VMAC is not set + +# +# Digest +# +CONFIG_CRYPTO_CRC32C=y +# CONFIG_CRYPTO_CRC32 is not set +# CONFIG_CRYPTO_CRCT10DIF is not set +# CONFIG_CRYPTO_GHASH is not set +# CONFIG_CRYPTO_POLY1305 is not set +# CONFIG_CRYPTO_MD4 is not set +# CONFIG_CRYPTO_MD5 is not set +# CONFIG_CRYPTO_MICHAEL_MIC is not set +# CONFIG_CRYPTO_RMD128 is not set +# CONFIG_CRYPTO_RMD160 is not set +# CONFIG_CRYPTO_RMD256 is not set +# CONFIG_CRYPTO_RMD320 is not set +# CONFIG_CRYPTO_SHA1 is not set +CONFIG_CRYPTO_SHA256=m +# CONFIG_CRYPTO_SHA512 is not set +# CONFIG_CRYPTO_TGR192 is not set +# CONFIG_CRYPTO_WP512 is not set + +# +# Ciphers +# +CONFIG_CRYPTO_AES=y +# CONFIG_CRYPTO_ANUBIS is not set +# CONFIG_CRYPTO_ARC4 is not set +# CONFIG_CRYPTO_BLOWFISH is not set +# CONFIG_CRYPTO_CAMELLIA is not set +# CONFIG_CRYPTO_CAST5 is not set +# CONFIG_CRYPTO_CAST6 is not set +# CONFIG_CRYPTO_DES is not set +# CONFIG_CRYPTO_FCRYPT is not set +# CONFIG_CRYPTO_KHAZAD is not set +# CONFIG_CRYPTO_SALSA20 is not set +# CONFIG_CRYPTO_CHACHA20 is not set +# CONFIG_CRYPTO_SEED is not set +# CONFIG_CRYPTO_SERPENT is not set +# CONFIG_CRYPTO_TEA is not set +# CONFIG_CRYPTO_TWOFISH is not set + +# +# Compression +# +# CONFIG_CRYPTO_DEFLATE is not set +# CONFIG_CRYPTO_ZLIB is not set +# CONFIG_CRYPTO_LZO is not set +# CONFIG_CRYPTO_842 is not set +# CONFIG_CRYPTO_LZ4 is not set +# CONFIG_CRYPTO_LZ4HC is not set + +# +# Random Number Generation +# +CONFIG_CRYPTO_ANSI_CPRNG=y +CONFIG_CRYPTO_DRBG_MENU=m +CONFIG_CRYPTO_DRBG_HMAC=y +# CONFIG_CRYPTO_DRBG_HASH is not set +# CONFIG_CRYPTO_DRBG_CTR is not set +CONFIG_CRYPTO_DRBG=m +CONFIG_CRYPTO_JITTERENTROPY=m +# CONFIG_CRYPTO_USER_API_HASH is not set +# CONFIG_CRYPTO_USER_API_SKCIPHER is not set +# CONFIG_CRYPTO_USER_API_RNG is not set +# CONFIG_CRYPTO_USER_API_AEAD is not set +CONFIG_CRYPTO_HW=y +# CONFIG_CRYPTO_DEV_S5P is not set +# CONFIG_CRYPTO_DEV_CCP is not set +# CONFIG_CRYPTO_DEV_QCE is not set +# CONFIG_ASYMMETRIC_KEY_TYPE is not set + +# +# Certificates for signature checking +# +# CONFIG_SYSTEM_TRUSTED_KEYRING is not set +CONFIG_ARM64_CRYPTO=y +CONFIG_CRYPTO_SHA1_ARM64_CE=y +CONFIG_CRYPTO_SHA2_ARM64_CE=y +CONFIG_CRYPTO_GHASH_ARM64_CE=y +CONFIG_CRYPTO_AES_ARM64_CE=y +CONFIG_CRYPTO_AES_ARM64_CE_CCM=y +CONFIG_CRYPTO_AES_ARM64_CE_BLK=y +CONFIG_CRYPTO_AES_ARM64_NEON_BLK=y +CONFIG_CRYPTO_CRC32_ARM64=y +# CONFIG_BINARY_PRINTF is not set + +# +# Library routines +# +CONFIG_BITREVERSE=y +CONFIG_HAVE_ARCH_BITREVERSE=y +CONFIG_RATIONAL=y +CONFIG_GENERIC_STRNCPY_FROM_USER=y +CONFIG_GENERIC_STRNLEN_USER=y +CONFIG_GENERIC_NET_UTILS=y 
+CONFIG_GENERIC_PCI_IOMAP=y +CONFIG_GENERIC_IO=y +CONFIG_ARCH_USE_CMPXCHG_LOCKREF=y +# CONFIG_CRC_CCITT is not set +CONFIG_CRC16=y +# CONFIG_CRC_T10DIF is not set +CONFIG_CRC_ITU_T=y +CONFIG_CRC32=y +# CONFIG_CRC32_SELFTEST is not set +CONFIG_CRC32_SLICEBY8=y +# CONFIG_CRC32_SLICEBY4 is not set +# CONFIG_CRC32_SARWATE is not set +# CONFIG_CRC32_BIT is not set +CONFIG_CRC7=y +# CONFIG_LIBCRC32C is not set +# CONFIG_CRC8 is not set +CONFIG_AUDIT_GENERIC=y +CONFIG_AUDIT_ARCH_COMPAT_GENERIC=y +CONFIG_AUDIT_COMPAT_GENERIC=y +# CONFIG_RANDOM32_SELFTEST is not set +CONFIG_ZLIB_INFLATE=y +CONFIG_LZO_COMPRESS=y +CONFIG_LZO_DECOMPRESS=y +CONFIG_LZ4_DECOMPRESS=y +CONFIG_XZ_DEC=y +CONFIG_XZ_DEC_X86=y +CONFIG_XZ_DEC_POWERPC=y +CONFIG_XZ_DEC_IA64=y +CONFIG_XZ_DEC_ARM=y +CONFIG_XZ_DEC_ARMTHUMB=y +CONFIG_XZ_DEC_SPARC=y +CONFIG_XZ_DEC_BCJ=y +# CONFIG_XZ_DEC_TEST is not set +CONFIG_DECOMPRESS_GZIP=y +CONFIG_DECOMPRESS_BZIP2=y +CONFIG_DECOMPRESS_LZMA=y +CONFIG_DECOMPRESS_XZ=y +CONFIG_DECOMPRESS_LZO=y +CONFIG_DECOMPRESS_LZ4=y +CONFIG_GENERIC_ALLOCATOR=y +CONFIG_ASSOCIATIVE_ARRAY=y +CONFIG_HAS_IOMEM=y +CONFIG_HAS_IOPORT_MAP=y +CONFIG_HAS_DMA=y +CONFIG_CPU_RMAP=y +CONFIG_DQL=y +CONFIG_GLOB=y +# CONFIG_GLOB_SELFTEST is not set +CONFIG_NLATTR=y +CONFIG_ARCH_HAS_ATOMIC64_DEC_IF_POSITIVE=y +# CONFIG_CORDIC is not set +# CONFIG_DDR is not set +CONFIG_LIBFDT=y +CONFIG_OID_REGISTRY=y +CONFIG_UCS2_STRING=y +CONFIG_FONT_SUPPORT=y +# CONFIG_FONTS is not set +CONFIG_FONT_8x8=y +CONFIG_FONT_8x16=y +# CONFIG_SG_SPLIT is not set +CONFIG_ARCH_HAS_SG_CHAIN=y diff --git a/src/ci/docker/disabled/dist-aarch64-android/Dockerfile b/src/ci/docker/disabled/dist-aarch64-android/Dockerfile index 1c9e036f09..20d823a3d7 100644 --- a/src/ci/docker/disabled/dist-aarch64-android/Dockerfile +++ b/src/ci/docker/disabled/dist-aarch64-android/Dockerfile @@ -3,9 +3,6 @@ FROM ubuntu:16.04 COPY scripts/android-base-apt-get.sh /scripts/ RUN sh /scripts/android-base-apt-get.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/android-ndk.sh /scripts/ RUN . /scripts/android-ndk.sh && \ download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip arm64 21 @@ -28,5 +25,3 @@ ENV SCRIPT python2.7 ../x.py dist --target $HOSTS --host $HOSTS COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/src/ci/docker/disabled/dist-armv7-android/Dockerfile b/src/ci/docker/disabled/dist-armv7-android/Dockerfile index 326e00548b..3435d641a1 100644 --- a/src/ci/docker/disabled/dist-armv7-android/Dockerfile +++ b/src/ci/docker/disabled/dist-armv7-android/Dockerfile @@ -3,9 +3,6 @@ FROM ubuntu:16.04 COPY scripts/android-base-apt-get.sh /scripts/ RUN sh /scripts/android-base-apt-get.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/android-ndk.sh /scripts/ RUN . 
/scripts/android-ndk.sh && \ download_ndk android-ndk-r13b-linux-x86_64.zip && \ @@ -46,5 +43,3 @@ ENV SCRIPT \ COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/src/ci/docker/disabled/dist-i686-android/Dockerfile b/src/ci/docker/disabled/dist-i686-android/Dockerfile index d01648e2b9..4bb7053760 100644 --- a/src/ci/docker/disabled/dist-i686-android/Dockerfile +++ b/src/ci/docker/disabled/dist-i686-android/Dockerfile @@ -3,9 +3,6 @@ FROM ubuntu:16.04 COPY scripts/android-base-apt-get.sh /scripts/ RUN sh /scripts/android-base-apt-get.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/android-ndk.sh /scripts/ RUN . /scripts/android-ndk.sh && \ download_ndk android-ndk-r13b-linux-x86_64.zip && \ @@ -46,5 +43,3 @@ ENV SCRIPT \ COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/src/ci/docker/disabled/dist-x86_64-android/Dockerfile b/src/ci/docker/disabled/dist-x86_64-android/Dockerfile index 2622b4b3fa..525b218417 100644 --- a/src/ci/docker/disabled/dist-x86_64-android/Dockerfile +++ b/src/ci/docker/disabled/dist-x86_64-android/Dockerfile @@ -3,9 +3,6 @@ FROM ubuntu:16.04 COPY scripts/android-base-apt-get.sh /scripts/ RUN sh /scripts/android-base-apt-get.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/android-ndk.sh /scripts/ RUN . /scripts/android-ndk.sh && \ download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip x86_64 21 @@ -28,5 +25,3 @@ ENV SCRIPT python2.7 ../x.py dist --target $HOSTS --host $HOSTS COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/src/ci/docker/disabled/dist-x86_64-redox/Dockerfile b/src/ci/docker/disabled/dist-x86_64-redox/Dockerfile new file mode 100644 index 0000000000..ed19939545 --- /dev/null +++ b/src/ci/docker/disabled/dist-x86_64-redox/Dockerfile @@ -0,0 +1,22 @@ +FROM ubuntu:16.04 + +COPY scripts/cross-apt-packages.sh /scripts/ +RUN sh /scripts/cross-apt-packages.sh + +COPY scripts/crosstool-ng.sh /scripts/ +RUN sh /scripts/crosstool-ng.sh + +WORKDIR /tmp +COPY cross/install-x86_64-redox.sh /tmp/ +RUN ./install-x86_64-redox.sh + +COPY scripts/sccache.sh /scripts/ +RUN sh /scripts/sccache.sh + +ENV \ + AR_x86_64_unknown_redox=x86_64-unknown-redox-ar \ + CC_x86_64_unknown_redox=x86_64-unknown-redox-gcc \ + CXX_x86_64_unknown_redox=x86_64-unknown-redox-g++ + +ENV RUST_CONFIGURE_ARGS --target=x86_64-unknown-redox --enable-extended +ENV SCRIPT python2.7 ../x.py dist --target x86_64-unknown-redox diff --git a/src/ci/docker/disabled/wasm32-exp/Dockerfile b/src/ci/docker/disabled/wasm32-exp/Dockerfile index 4e33788316..6323369421 100644 --- a/src/ci/docker/disabled/wasm32-exp/Dockerfile +++ b/src/ci/docker/disabled/wasm32-exp/Dockerfile @@ -15,10 +15,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ jq \ bzip2 -# dumb-init -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - # emscripten COPY scripts/emscripten-wasm.sh /scripts/ COPY disabled/wasm32-exp/node.sh /usr/local/bin/node @@ -37,6 +33,3 @@ ENV TARGETS=wasm32-experimental-emscripten ENV RUST_CONFIGURE_ARGS --target=$TARGETS --experimental-targets=WebAssembly ENV SCRIPT python2.7 ../x.py test --target $TARGETS - -# init -ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/src/ci/docker/disabled/wasm32/Dockerfile b/src/ci/docker/disabled/wasm32/Dockerfile index 60b15d7afb..7f6f8ae088 100644 --- 
a/src/ci/docker/disabled/wasm32/Dockerfile +++ b/src/ci/docker/disabled/wasm32/Dockerfile @@ -13,13 +13,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ gdb \ xz-utils -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - # emscripten COPY scripts/emscripten.sh /scripts/ RUN bash /scripts/emscripten.sh -COPY disabled/wasm32/node.sh /usr/local/bin/node COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh @@ -27,6 +23,7 @@ RUN sh /scripts/sccache.sh ENV PATH=$PATH:/emsdk-portable ENV PATH=$PATH:/emsdk-portable/clang/e1.37.13_64bit/ ENV PATH=$PATH:/emsdk-portable/emscripten/1.37.13/ +ENV PATH=$PATH:/node-v8.0.0-linux-x64/bin/ ENV EMSCRIPTEN=/emsdk-portable/emscripten/1.37.13/ ENV BINARYEN_ROOT=/emsdk-portable/clang/e1.37.13_64bit/binaryen/ ENV EM_CONFIG=/emsdk-portable/.emscripten @@ -36,5 +33,3 @@ ENV TARGETS=wasm32-unknown-emscripten ENV RUST_CONFIGURE_ARGS --target=$TARGETS ENV SCRIPT python2.7 ../x.py test --target $TARGETS - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/src/ci/docker/dist-aarch64-linux/Dockerfile b/src/ci/docker/dist-aarch64-linux/Dockerfile index cc3b6b8c69..841d301212 100644 --- a/src/ci/docker/dist-aarch64-linux/Dockerfile +++ b/src/ci/docker/dist-aarch64-linux/Dockerfile @@ -3,11 +3,6 @@ FROM ubuntu:16.04 COPY scripts/cross-apt-packages.sh /scripts/ RUN sh /scripts/cross-apt-packages.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - # Ubuntu 16.04 (this container) ships with make 4, but something in the # toolchains we build below chokes on that, so go back to make 3 COPY scripts/make3.sh /scripts/ diff --git a/src/ci/docker/dist-android/Dockerfile b/src/ci/docker/dist-android/Dockerfile index f3beddfae0..a36f7fc1ac 100644 --- a/src/ci/docker/dist-android/Dockerfile +++ b/src/ci/docker/dist-android/Dockerfile @@ -3,9 +3,6 @@ FROM ubuntu:16.04 COPY scripts/android-base-apt-get.sh /scripts/ RUN sh /scripts/android-base-apt-get.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - # ndk COPY scripts/android-ndk.sh /scripts/ RUN . 
/scripts/android-ndk.sh && \ @@ -36,5 +33,3 @@ ENV SCRIPT python2.7 ../x.py dist --target $TARGETS COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/src/ci/docker/dist-arm-linux/Dockerfile b/src/ci/docker/dist-arm-linux/Dockerfile index 590d6eb98b..ecd5090ea0 100644 --- a/src/ci/docker/dist-arm-linux/Dockerfile +++ b/src/ci/docker/dist-arm-linux/Dockerfile @@ -3,11 +3,6 @@ FROM ubuntu:16.04 COPY scripts/cross-apt-packages.sh /scripts/ RUN sh /scripts/cross-apt-packages.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - # Ubuntu 16.04 (this container) ships with make 4, but something in the # toolchains we build below chokes on that, so go back to make 3 COPY scripts/make3.sh /scripts/ diff --git a/src/ci/docker/dist-armhf-linux/Dockerfile b/src/ci/docker/dist-armhf-linux/Dockerfile index b3dedc4b7f..5bbd17bd41 100644 --- a/src/ci/docker/dist-armhf-linux/Dockerfile +++ b/src/ci/docker/dist-armhf-linux/Dockerfile @@ -3,11 +3,6 @@ FROM ubuntu:16.04 COPY scripts/cross-apt-packages.sh /scripts/ RUN sh /scripts/cross-apt-packages.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - # Ubuntu 16.04 (this container) ships with make 4, but something in the # toolchains we build below chokes on that, so go back to make 3 COPY scripts/make3.sh /scripts/ diff --git a/src/ci/docker/dist-armv7-linux/Dockerfile b/src/ci/docker/dist-armv7-linux/Dockerfile index 82536b68bb..ea9034d717 100644 --- a/src/ci/docker/dist-armv7-linux/Dockerfile +++ b/src/ci/docker/dist-armv7-linux/Dockerfile @@ -3,11 +3,6 @@ FROM ubuntu:16.04 COPY scripts/cross-apt-packages.sh /scripts/ RUN sh /scripts/cross-apt-packages.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - # Ubuntu 16.04 (this container) ships with make 4, but something in the # toolchains we build below chokes on that, so go back to make 3 COPY scripts/make3.sh /scripts/ diff --git a/src/ci/docker/dist-fuchsia/Dockerfile b/src/ci/docker/dist-fuchsia/Dockerfile index 24ad12a490..e18cb453ba 100644 --- a/src/ci/docker/dist-fuchsia/Dockerfile +++ b/src/ci/docker/dist-fuchsia/Dockerfile @@ -24,11 +24,6 @@ WORKDIR /tmp COPY dist-fuchsia/shared.sh dist-fuchsia/build-toolchain.sh dist-fuchsia/compiler-rt-dso-handle.patch /tmp/ RUN /tmp/build-toolchain.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/dist-i586-gnu-i686-musl/Dockerfile b/src/ci/docker/dist-i586-gnu-i686-musl/Dockerfile index 036dce2a73..efde3ff529 100644 --- a/src/ci/docker/dist-i586-gnu-i686-musl/Dockerfile +++ b/src/ci/docker/dist-i586-gnu-i686-musl/Dockerfile @@ -20,11 +20,6 @@ WORKDIR /build/ COPY dist-i586-gnu-i686-musl/musl-libunwind-patch.patch dist-i586-gnu-i686-musl/build-musl.sh /build/ RUN sh /build/build-musl.sh && rm -rf /build -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/dist-i686-freebsd/Dockerfile b/src/ci/docker/dist-i686-freebsd/Dockerfile index c40881332a..b82d227b3e 100644 --- a/src/ci/docker/dist-i686-freebsd/Dockerfile +++ b/src/ci/docker/dist-i686-freebsd/Dockerfile @@ -19,11 +19,6 @@ RUN apt-get update && apt-get install -y 
--no-install-recommends \ COPY dist-i686-freebsd/build-toolchain.sh /tmp/ RUN /tmp/build-toolchain.sh i686 -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/dist-i686-freebsd/build-toolchain.sh b/src/ci/docker/dist-i686-freebsd/build-toolchain.sh index 5642e6fc93..8343327c33 100755 --- a/src/ci/docker/dist-i686-freebsd/build-toolchain.sh +++ b/src/ci/docker/dist-i686-freebsd/build-toolchain.sh @@ -13,7 +13,7 @@ set -ex ARCH=$1 BINUTILS=2.25.1 -GCC=5.3.0 +GCC=6.4.0 hide_output() { set +x @@ -86,7 +86,7 @@ rm -rf freebsd # Finally, download and build gcc to target FreeBSD mkdir gcc cd gcc -curl https://ftp.gnu.org/gnu/gcc/gcc-$GCC/gcc-$GCC.tar.bz2 | tar xjf - +curl https://ftp.gnu.org/gnu/gcc/gcc-$GCC/gcc-$GCC.tar.gz | tar xzf - cd gcc-$GCC ./contrib/download_prerequisites diff --git a/src/ci/docker/dist-i686-linux/Dockerfile b/src/ci/docker/dist-i686-linux/Dockerfile index beb53783ba..a5d776af19 100644 --- a/src/ci/docker/dist-i686-linux/Dockerfile +++ b/src/ci/docker/dist-i686-linux/Dockerfile @@ -76,11 +76,6 @@ RUN ./build-cmake.sh COPY dist-i686-linux/build-headers.sh /tmp/ RUN ./build-headers.sh -RUN curl -Lo /rustroot/dumb-init \ - https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64 && \ - chmod +x /rustroot/dumb-init -ENTRYPOINT ["/rustroot/dumb-init", "--"] - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/dist-i686-linux/build-openssl.sh b/src/ci/docker/dist-i686-linux/build-openssl.sh index 27cd064f90..34da0ed631 100755 --- a/src/ci/docker/dist-i686-linux/build-openssl.sh +++ b/src/ci/docker/dist-i686-linux/build-openssl.sh @@ -13,7 +13,7 @@ set -ex source shared.sh VERSION=1.0.2k -URL=https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/openssl-$VERSION.tar.gz +URL=https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/openssl-$VERSION.tar.gz curl $URL | tar xzf - diff --git a/src/ci/docker/dist-mips-linux/Dockerfile b/src/ci/docker/dist-mips-linux/Dockerfile index 81997e0508..94a3cf8a38 100644 --- a/src/ci/docker/dist-mips-linux/Dockerfile +++ b/src/ci/docker/dist-mips-linux/Dockerfile @@ -17,14 +17,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ pkg-config -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - ENV HOSTS=mips-unknown-linux-gnu ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended diff --git a/src/ci/docker/dist-mips64-linux/Dockerfile b/src/ci/docker/dist-mips64-linux/Dockerfile index 646cb4d256..0b0dfff1fe 100644 --- a/src/ci/docker/dist-mips64-linux/Dockerfile +++ b/src/ci/docker/dist-mips64-linux/Dockerfile @@ -16,14 +16,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ libssl-dev \ pkg-config -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - ENV HOSTS=mips64-unknown-linux-gnuabi64 ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended diff --git a/src/ci/docker/dist-mips64el-linux/Dockerfile b/src/ci/docker/dist-mips64el-linux/Dockerfile index 1abb04fd8b..1810b1cdc5 100644 --- a/src/ci/docker/dist-mips64el-linux/Dockerfile +++ b/src/ci/docker/dist-mips64el-linux/Dockerfile @@ -17,14 +17,9 @@ RUN apt-get update && apt-get install -y 
--no-install-recommends \ pkg-config -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - ENV HOSTS=mips64el-unknown-linux-gnuabi64 ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended diff --git a/src/ci/docker/dist-mipsel-linux/Dockerfile b/src/ci/docker/dist-mipsel-linux/Dockerfile index a78e39e7d7..f5be074847 100644 --- a/src/ci/docker/dist-mipsel-linux/Dockerfile +++ b/src/ci/docker/dist-mipsel-linux/Dockerfile @@ -16,14 +16,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ libssl-dev \ pkg-config -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - ENV HOSTS=mipsel-unknown-linux-gnu ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended diff --git a/src/ci/docker/dist-powerpc-linux/Dockerfile b/src/ci/docker/dist-powerpc-linux/Dockerfile index ed9f883cc8..14ce3654fc 100644 --- a/src/ci/docker/dist-powerpc-linux/Dockerfile +++ b/src/ci/docker/dist-powerpc-linux/Dockerfile @@ -3,11 +3,6 @@ FROM ubuntu:16.04 COPY scripts/cross-apt-packages.sh /scripts/ RUN sh /scripts/cross-apt-packages.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - # Ubuntu 16.04 (this container) ships with make 4, but something in the # toolchains we build below chokes on that, so go back to make 3 COPY scripts/make3.sh /scripts/ diff --git a/src/ci/docker/dist-powerpc64-linux/Dockerfile b/src/ci/docker/dist-powerpc64-linux/Dockerfile index 523211498f..1f6e83e2f4 100644 --- a/src/ci/docker/dist-powerpc64-linux/Dockerfile +++ b/src/ci/docker/dist-powerpc64-linux/Dockerfile @@ -3,10 +3,6 @@ FROM ubuntu:16.04 COPY scripts/cross-apt-packages.sh /scripts/ RUN sh /scripts/cross-apt-packages.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] # Ubuntu 16.04 (this container) ships with make 4, but something in the # toolchains we build below chokes on that, so go back to make 3 diff --git a/src/ci/docker/dist-powerpc64le-linux/Dockerfile b/src/ci/docker/dist-powerpc64le-linux/Dockerfile index 06b262c515..d4677e1806 100644 --- a/src/ci/docker/dist-powerpc64le-linux/Dockerfile +++ b/src/ci/docker/dist-powerpc64le-linux/Dockerfile @@ -3,11 +3,6 @@ FROM ubuntu:16.04 COPY scripts/cross-apt-packages.sh /scripts/ RUN sh /scripts/cross-apt-packages.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - # Ubuntu 16.04 (this container) ships with make 4, but something in the # toolchains we build below chokes on that, so go back to make 3 COPY scripts/make3.sh /scripts/ diff --git a/src/ci/docker/dist-s390x-linux/Dockerfile b/src/ci/docker/dist-s390x-linux/Dockerfile index 84769af12b..39478e92f7 100644 --- a/src/ci/docker/dist-s390x-linux/Dockerfile +++ b/src/ci/docker/dist-s390x-linux/Dockerfile @@ -3,11 +3,6 @@ FROM ubuntu:16.04 COPY scripts/cross-apt-packages.sh /scripts/ RUN sh /scripts/cross-apt-packages.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - # Ubuntu 16.04 (this container) ships with make 4, but something in the # toolchains we build below chokes on that, so go back to make 3 COPY scripts/make3.sh /scripts/ diff --git a/src/ci/docker/dist-x86_64-freebsd/Dockerfile 
b/src/ci/docker/dist-x86_64-freebsd/Dockerfile index b62b46701a..b7c00df4db 100644 --- a/src/ci/docker/dist-x86_64-freebsd/Dockerfile +++ b/src/ci/docker/dist-x86_64-freebsd/Dockerfile @@ -19,11 +19,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ COPY dist-x86_64-freebsd/build-toolchain.sh /tmp/ RUN /tmp/build-toolchain.sh x86_64 -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/dist-x86_64-freebsd/build-toolchain.sh b/src/ci/docker/dist-x86_64-freebsd/build-toolchain.sh index 5642e6fc93..8343327c33 100755 --- a/src/ci/docker/dist-x86_64-freebsd/build-toolchain.sh +++ b/src/ci/docker/dist-x86_64-freebsd/build-toolchain.sh @@ -13,7 +13,7 @@ set -ex ARCH=$1 BINUTILS=2.25.1 -GCC=5.3.0 +GCC=6.4.0 hide_output() { set +x @@ -86,7 +86,7 @@ rm -rf freebsd # Finally, download and build gcc to target FreeBSD mkdir gcc cd gcc -curl https://ftp.gnu.org/gnu/gcc/gcc-$GCC/gcc-$GCC.tar.bz2 | tar xjf - +curl https://ftp.gnu.org/gnu/gcc/gcc-$GCC/gcc-$GCC.tar.gz | tar xzf - cd gcc-$GCC ./contrib/download_prerequisites diff --git a/src/ci/docker/dist-x86_64-linux/Dockerfile b/src/ci/docker/dist-x86_64-linux/Dockerfile index d0ab47ad3d..a954fd86a2 100644 --- a/src/ci/docker/dist-x86_64-linux/Dockerfile +++ b/src/ci/docker/dist-x86_64-linux/Dockerfile @@ -76,11 +76,6 @@ RUN ./build-cmake.sh COPY dist-x86_64-linux/build-headers.sh /tmp/ RUN ./build-headers.sh -RUN curl -Lo /rustroot/dumb-init \ - https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64 && \ - chmod +x /rustroot/dumb-init -ENTRYPOINT ["/rustroot/dumb-init", "--"] - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/dist-x86_64-linux/build-openssl.sh b/src/ci/docker/dist-x86_64-linux/build-openssl.sh index 27cd064f90..34da0ed631 100755 --- a/src/ci/docker/dist-x86_64-linux/build-openssl.sh +++ b/src/ci/docker/dist-x86_64-linux/build-openssl.sh @@ -13,7 +13,7 @@ set -ex source shared.sh VERSION=1.0.2k -URL=https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/openssl-$VERSION.tar.gz +URL=https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/openssl-$VERSION.tar.gz curl $URL | tar xzf - diff --git a/src/ci/docker/dist-x86_64-musl/Dockerfile b/src/ci/docker/dist-x86_64-musl/Dockerfile index eebc283e96..91ed6bfe1f 100644 --- a/src/ci/docker/dist-x86_64-musl/Dockerfile +++ b/src/ci/docker/dist-x86_64-musl/Dockerfile @@ -20,11 +20,6 @@ WORKDIR /build/ COPY dist-x86_64-musl/build-musl.sh /build/ RUN sh /build/build-musl.sh && rm -rf /build -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/dist-x86_64-netbsd/Dockerfile b/src/ci/docker/dist-x86_64-netbsd/Dockerfile index 0c14050026..4fd2503c31 100644 --- a/src/ci/docker/dist-x86_64-netbsd/Dockerfile +++ b/src/ci/docker/dist-x86_64-netbsd/Dockerfile @@ -3,11 +3,6 @@ FROM ubuntu:16.04 COPY scripts/cross-apt-packages.sh /scripts/ RUN sh /scripts/cross-apt-packages.sh -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - # Ubuntu 16.04 (this container) ships with make 4, but something in the # toolchains we build below chokes on that, so go back to make 3 COPY scripts/make3.sh /scripts/ diff --git a/src/ci/docker/dist-x86_64-netbsd/build-netbsd-toolchain.sh 
b/src/ci/docker/dist-x86_64-netbsd/build-netbsd-toolchain.sh index ea335a2497..54100b49cb 100755 --- a/src/ci/docker/dist-x86_64-netbsd/build-netbsd-toolchain.sh +++ b/src/ci/docker/dist-x86_64-netbsd/build-netbsd-toolchain.sh @@ -35,7 +35,7 @@ cd netbsd mkdir -p /x-tools/x86_64-unknown-netbsd/sysroot -URL=https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror +URL=https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror # Originally from ftp://ftp.netbsd.org/pub/NetBSD/NetBSD-$BSD/source/sets/*.tgz curl $URL/2017-03-17-netbsd-src.tgz | tar xzf - diff --git a/src/ci/docker/i686-gnu-nopt/Dockerfile b/src/ci/docker/i686-gnu-nopt/Dockerfile index 56ff9922ae..5a83bd318c 100644 --- a/src/ci/docker/i686-gnu-nopt/Dockerfile +++ b/src/ci/docker/i686-gnu-nopt/Dockerfile @@ -14,13 +14,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ xz-utils -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu --disable-optimize-tests ENV RUST_CHECK_TARGET check diff --git a/src/ci/docker/i686-gnu/Dockerfile b/src/ci/docker/i686-gnu/Dockerfile index 1c1333cd7c..daa24e0e81 100644 --- a/src/ci/docker/i686-gnu/Dockerfile +++ b/src/ci/docker/i686-gnu/Dockerfile @@ -14,13 +14,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ xz-utils -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu ENV SCRIPT python2.7 ../x.py test diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh index da74ffb41f..7087033e11 100755 --- a/src/ci/docker/run.sh +++ b/src/ci/docker/run.sh @@ -57,9 +57,10 @@ mkdir -p $objdir/tmp args= if [ "$SCCACHE_BUCKET" != "" ]; then - args="$args --env SCCACHE_BUCKET=$SCCACHE_BUCKET" - args="$args --env AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID" - args="$args --env AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY" + args="$args --env SCCACHE_BUCKET" + args="$args --env SCCACHE_REGION" + args="$args --env AWS_ACCESS_KEY_ID" + args="$args --env AWS_SECRET_ACCESS_KEY" args="$args --env SCCACHE_ERROR_LOG=/tmp/sccache/sccache.log" args="$args --volume $objdir/tmp:/tmp/sccache" else @@ -67,6 +68,13 @@ else args="$args --env SCCACHE_DIR=/sccache --volume $HOME/.cache/sccache:/sccache" fi +# Run containers as privileged as it should give them access to some more +# syscalls such as ptrace and whatnot. In the upgrade to LLVM 5.0 it was +# discovered that the leak sanitizer apparently needs these syscalls nowadays so +# we'll need `--privileged` for at least the `x86_64-gnu` builder, so this just +# goes ahead and sets it for all builders. 
+args="$args --privileged" + exec docker \ run \ --volume "$root_dir:/checkout:ro" \ @@ -75,13 +83,14 @@ exec docker \ --env SRC=/checkout \ $args \ --env CARGO_HOME=/cargo \ - --env DEPLOY=$DEPLOY \ - --env DEPLOY_ALT=$DEPLOY_ALT \ + --env DEPLOY \ + --env DEPLOY_ALT \ --env LOCAL_USER_ID=`id -u` \ - --env TRAVIS=${TRAVIS-false} \ + --env TRAVIS \ --env TRAVIS_BRANCH \ --volume "$HOME/.cargo:/cargo" \ --volume "$HOME/rustsrc:$HOME/rustsrc" \ + --init \ --rm \ rust-ci \ /checkout/src/ci/run.sh diff --git a/src/ci/docker/scripts/android-ndk.sh b/src/ci/docker/scripts/android-ndk.sh index c3d83c087e..ec030496d3 100644 --- a/src/ci/docker/scripts/android-ndk.sh +++ b/src/ci/docker/scripts/android-ndk.sh @@ -15,7 +15,7 @@ URL=https://dl.google.com/android/repository download_ndk() { mkdir -p /android/ndk cd /android/ndk - curl -O $URL/$1 + curl -fO $URL/$1 unzip -q $1 rm $1 mv android-ndk-* ndk diff --git a/src/ci/docker/scripts/android-sdk.sh b/src/ci/docker/scripts/android-sdk.sh index 7d8110efed..d343aae9df 100644 --- a/src/ci/docker/scripts/android-sdk.sh +++ b/src/ci/docker/scripts/android-sdk.sh @@ -15,7 +15,7 @@ URL=https://dl.google.com/android/repository download_sdk() { mkdir -p /android/sdk cd /android/sdk - curl -O $URL/$1 + curl -fO $URL/$1 unzip -q $1 rm -rf $1 } diff --git a/src/ci/docker/scripts/crosstool-ng.sh b/src/ci/docker/scripts/crosstool-ng.sh index 8b2747cf21..79a5bc3bb4 100644 --- a/src/ci/docker/scripts/crosstool-ng.sh +++ b/src/ci/docker/scripts/crosstool-ng.sh @@ -11,7 +11,7 @@ set -ex url="http://crosstool-ng.org/download/crosstool-ng/crosstool-ng-1.22.0.tar.bz2" -curl $url | tar xjf - +curl -f $url | tar xjf - cd crosstool-ng ./configure --prefix=/usr/local make -j$(nproc) diff --git a/src/ci/docker/scripts/emscripten-wasm.sh b/src/ci/docker/scripts/emscripten-wasm.sh index 0e7da45907..18499060a2 100644 --- a/src/ci/docker/scripts/emscripten-wasm.sh +++ b/src/ci/docker/scripts/emscripten-wasm.sh @@ -28,14 +28,14 @@ exit 1 } # Download last known good emscripten from WebAssembly waterfall -BUILD=$(curl -L https://storage.googleapis.com/wasm-llvm/builds/linux/lkgr.json | \ +BUILD=$(curl -fL https://storage.googleapis.com/wasm-llvm/builds/linux/lkgr.json | \ jq '.build | tonumber') -curl -L https://storage.googleapis.com/wasm-llvm/builds/linux/$BUILD/wasm-binaries.tbz2 | \ +curl -sL https://storage.googleapis.com/wasm-llvm/builds/linux/$BUILD/wasm-binaries.tbz2 | \ hide_output tar xvkj # node 8 is required to run wasm cd / -curl -L https://nodejs.org/dist/v8.0.0/node-v8.0.0-linux-x64.tar.xz | \ +curl -sL https://nodejs.org/dist/v8.0.0/node-v8.0.0-linux-x64.tar.xz | \ tar -xJ # Make emscripten use wasm-ready node and LLVM tools diff --git a/src/ci/docker/scripts/emscripten.sh b/src/ci/docker/scripts/emscripten.sh index cf5eecbdb6..d32ed6b461 100644 --- a/src/ci/docker/scripts/emscripten.sh +++ b/src/ci/docker/scripts/emscripten.sh @@ -28,7 +28,7 @@ exit 1 } cd / -curl -L https://s3.amazonaws.com/mozilla-games/emscripten/releases/emsdk-portable.tar.gz | \ +curl -fL https://s3.amazonaws.com/mozilla-games/emscripten/releases/emsdk-portable.tar.gz | \ tar -xz cd /emsdk-portable @@ -49,5 +49,5 @@ chmod a+rxw -R /emsdk-portable # node 8 is required to run wasm cd / -curl -L https://nodejs.org/dist/v8.0.0/node-v8.0.0-linux-x64.tar.xz | \ +curl -sL https://nodejs.org/dist/v8.0.0/node-v8.0.0-linux-x64.tar.xz | \ tar -xJ diff --git a/src/ci/docker/scripts/make3.sh b/src/ci/docker/scripts/make3.sh index 8a7845cb8f..ec6e046c96 100644 --- a/src/ci/docker/scripts/make3.sh +++ 
b/src/ci/docker/scripts/make3.sh @@ -10,7 +10,7 @@ set -ex -curl https://ftp.gnu.org/gnu/make/make-3.81.tar.gz | tar xzf - +curl -f https://ftp.gnu.org/gnu/make/make-3.81.tar.gz | tar xzf - cd make-3.81 ./configure --prefix=/usr make diff --git a/src/ci/docker/armhf-gnu/addentropy.c b/src/ci/docker/scripts/qemu-bare-bones-addentropy.c similarity index 100% rename from src/ci/docker/armhf-gnu/addentropy.c rename to src/ci/docker/scripts/qemu-bare-bones-addentropy.c diff --git a/src/ci/docker/armhf-gnu/rcS b/src/ci/docker/scripts/qemu-bare-bones-rcS similarity index 100% rename from src/ci/docker/armhf-gnu/rcS rename to src/ci/docker/scripts/qemu-bare-bones-rcS diff --git a/src/ci/docker/scripts/sccache.sh b/src/ci/docker/scripts/sccache.sh index 7a2befaf67..ce2d45563f 100644 --- a/src/ci/docker/scripts/sccache.sh +++ b/src/ci/docker/scripts/sccache.sh @@ -8,9 +8,11 @@ # option. This file may not be copied, modified, or distributed # except according to those terms. +# ignore-tidy-linelength + set -ex -curl -o /usr/local/bin/sccache \ - https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl +curl -fo /usr/local/bin/sccache \ + https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl chmod +x /usr/local/bin/sccache diff --git a/src/ci/docker/x86_64-gnu-aux/Dockerfile b/src/ci/docker/x86_64-gnu-aux/Dockerfile index a449526efc..35a387221c 100644 --- a/src/ci/docker/x86_64-gnu-aux/Dockerfile +++ b/src/ci/docker/x86_64-gnu-aux/Dockerfile @@ -14,13 +14,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ xz-utils \ pkg-config -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu ENV RUST_CHECK_TARGET check-aux diff --git a/src/ci/docker/x86_64-gnu-debug/Dockerfile b/src/ci/docker/x86_64-gnu-debug/Dockerfile index 8111118ad3..95d4102859 100644 --- a/src/ci/docker/x86_64-gnu-debug/Dockerfile +++ b/src/ci/docker/x86_64-gnu-debug/Dockerfile @@ -13,14 +13,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ gdb \ xz-utils -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - ENV RUST_CONFIGURE_ARGS \ --build=x86_64-unknown-linux-gnu \ --enable-debug \ diff --git a/src/ci/docker/x86_64-gnu-distcheck/Dockerfile b/src/ci/docker/x86_64-gnu-distcheck/Dockerfile index c3fe8ea51c..786f59eb9f 100644 --- a/src/ci/docker/x86_64-gnu-distcheck/Dockerfile +++ b/src/ci/docker/x86_64-gnu-distcheck/Dockerfile @@ -15,14 +15,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ libssl-dev \ pkg-config -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu ENV SCRIPT python2.7 ../x.py test distcheck ENV DIST_SRC 1 diff --git a/src/ci/docker/x86_64-gnu-full-bootstrap/Dockerfile b/src/ci/docker/x86_64-gnu-full-bootstrap/Dockerfile index 377e086955..70390c2ac1 100644 --- a/src/ci/docker/x86_64-gnu-full-bootstrap/Dockerfile +++ b/src/ci/docker/x86_64-gnu-full-bootstrap/Dockerfile @@ -13,14 +13,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ gdb \ xz-utils -COPY scripts/dumb-init.sh /scripts/ 
-RUN sh /scripts/dumb-init.sh - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - ENV RUST_CONFIGURE_ARGS \ --build=x86_64-unknown-linux-gnu \ --enable-full-bootstrap diff --git a/src/ci/docker/x86_64-gnu-incremental/Dockerfile b/src/ci/docker/x86_64-gnu-incremental/Dockerfile index a21a99e965..d323677698 100644 --- a/src/ci/docker/x86_64-gnu-incremental/Dockerfile +++ b/src/ci/docker/x86_64-gnu-incremental/Dockerfile @@ -13,14 +13,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ gdb \ xz-utils -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu ENV RUSTFLAGS -Zincremental=/tmp/rust-incr-cache ENV RUST_CHECK_TARGET check diff --git a/src/ci/docker/x86_64-gnu-llvm-3.7/Dockerfile b/src/ci/docker/x86_64-gnu-llvm-3.7/Dockerfile index 1a9f1d2d73..e832a2445b 100644 --- a/src/ci/docker/x86_64-gnu-llvm-3.7/Dockerfile +++ b/src/ci/docker/x86_64-gnu-llvm-3.7/Dockerfile @@ -16,14 +16,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ zlib1g-dev \ xz-utils -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - ENV RUST_CONFIGURE_ARGS \ --build=x86_64-unknown-linux-gnu \ --llvm-root=/usr/lib/llvm-3.7 diff --git a/src/ci/docker/x86_64-gnu-nopt/Dockerfile b/src/ci/docker/x86_64-gnu-nopt/Dockerfile index e008b21f66..d2b0dd13dc 100644 --- a/src/ci/docker/x86_64-gnu-nopt/Dockerfile +++ b/src/ci/docker/x86_64-gnu-nopt/Dockerfile @@ -13,13 +13,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ gdb \ xz-utils -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --disable-optimize-tests ENV RUST_CHECK_TARGET check diff --git a/src/ci/docker/x86_64-gnu/Dockerfile b/src/ci/docker/x86_64-gnu/Dockerfile index 3a6760b476..7570bca590 100644 --- a/src/ci/docker/x86_64-gnu/Dockerfile +++ b/src/ci/docker/x86_64-gnu/Dockerfile @@ -13,13 +13,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ gdb \ xz-utils -COPY scripts/dumb-init.sh /scripts/ -RUN sh /scripts/dumb-init.sh - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -ENTRYPOINT ["/usr/bin/dumb-init", "--"] - ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --enable-sanitizers --enable-profiler ENV SCRIPT python2.7 ../x.py test diff --git a/src/ci/run.sh b/src/ci/run.sh index 587639a0a2..388a9c80d7 100755 --- a/src/ci/run.sh +++ b/src/ci/run.sh @@ -31,7 +31,6 @@ RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-sccache" RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --disable-manage-submodules" RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-locked-deps" RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-cargo-openssl-static" -RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-llvm-clean-rebuild" if [ "$DIST_SRC" = "" ]; then RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --disable-dist-src" @@ -74,6 +73,12 @@ retry make prepare travis_fold end make-prepare travis_time_finish +travis_fold start check-bootstrap +travis_time_start +make check-bootstrap +travis_fold end check-bootstrap +travis_time_finish + if [ "$TRAVIS_OS_NAME" = "osx" ]; then 
ncpus=$(sysctl -n hw.ncpu) else diff --git a/src/doc/book/ci/build.sh b/src/doc/book/ci/build.sh index dfe69bf6c7..086ed57728 100644 --- a/src/doc/book/ci/build.sh +++ b/src/doc/book/ci/build.sh @@ -20,8 +20,14 @@ cargo run -- ../../second-edition/src cd ../.. -# tests for the second edition +# tests for the first edition +cd first-edition +mdbook test +mdbook build +cd .. + +# tests for the second edition cd second-edition bash spellcheck.sh list mdbook test diff --git a/src/doc/book/first-edition/src/lifetimes.md b/src/doc/book/first-edition/src/lifetimes.md index 042d9af971..8947deaadb 100644 --- a/src/doc/book/first-edition/src/lifetimes.md +++ b/src/doc/book/first-edition/src/lifetimes.md @@ -53,7 +53,7 @@ Uh oh! Your reference is pointing to an invalid resource. This is called a dangling pointer or ‘use after free’, when the resource is memory. A small example of such a situation would be: -```rust,compile_fail +```rust,ignore let r; // Introduce reference: `r`. { let i = 1; // Introduce scoped value: `i`. @@ -70,7 +70,7 @@ as it can see the lifetimes of the various values in the function. When we have a function that takes arguments by reference the situation becomes more complex. Consider the following example: -```rust,compile_fail,E0106 +```rust,ignore fn skip_prefix(line: &str, prefix: &str) -> &str { // ... # line diff --git a/src/doc/book/first-edition/src/structs.md b/src/doc/book/first-edition/src/structs.md index 27c653aeda..f11c1610db 100644 --- a/src/doc/book/first-edition/src/structs.md +++ b/src/doc/book/first-edition/src/structs.md @@ -255,7 +255,7 @@ rather than positions. You can define a `struct` with no members at all: -```rust,compile_fail,E0423 +```rust,ignore struct Electron {} // Use empty braces... struct Proton; // ...or just a semicolon. diff --git a/src/doc/book/second-edition/Cargo.toml b/src/doc/book/second-edition/Cargo.toml index 7ab2575fa5..bb6763509f 100644 --- a/src/doc/book/second-edition/Cargo.toml +++ b/src/doc/book/second-edition/Cargo.toml @@ -24,6 +24,10 @@ path = "tools/src/bin/remove_links.rs" name = "remove_markup" path = "tools/src/bin/remove_markup.rs" +[[bin]] +name = "convert_quotes" +path = "tools/src/bin/convert_quotes.rs" + [dependencies] walkdir = "0.1.5" docopt = "0.6.82" diff --git a/src/ci/docker/disabled/wasm32/node.sh b/src/doc/book/second-edition/convert-quotes.sh similarity index 76% rename from src/ci/docker/disabled/wasm32/node.sh rename to src/doc/book/second-edition/convert-quotes.sh index dfa7f221ff..229be88921 100755 --- a/src/ci/docker/disabled/wasm32/node.sh +++ b/src/doc/book/second-edition/convert-quotes.sh @@ -9,10 +9,14 @@ # option. This file may not be copied, modified, or distributed # except according to those terms. 
-path="$(dirname $1)" -file="$(basename $1)" +set -eu -shift +dir=$1 -cd "$path" -exec /node-v8.0.0-linux-x64/bin/node "$file" "$@" +mkdir -p "tmp/$dir" + +for f in $dir/*.md +do + cat "$f" | cargo run --bin convert_quotes > "tmp/$f" + mv "tmp/$f" "$f" +done diff --git a/src/doc/book/second-edition/dictionary.txt b/src/doc/book/second-edition/dictionary.txt index 3a9c7dcc0a..955cf66b6b 100644 --- a/src/doc/book/second-edition/dictionary.txt +++ b/src/doc/book/second-edition/dictionary.txt @@ -94,6 +94,7 @@ DraftPost DSTs ebooks Edsger +egular else's emoji encodings @@ -179,6 +180,7 @@ librarys libreoffice libstd lifecycle +lobally locators login lookup @@ -260,6 +262,7 @@ pushups QuitMessage RAII randcrate +README READMEs rect recurse @@ -278,6 +281,7 @@ request's resizes resizing retweet +rint ripgrep runtime runtimes @@ -287,6 +291,7 @@ rUsT rustc rustdoc rustup +screenshot searchstring SecondaryColor SelectBox @@ -368,6 +373,7 @@ unsafety unsized unsynchronized URIs +UsefulType username USERPROFILE usize @@ -393,5 +399,6 @@ Workspaces wouldn writeln WriteMessage +xpression yyyy ZipImpl diff --git a/src/doc/book/second-edition/nostarch/chapter01.md b/src/doc/book/second-edition/nostarch/chapter01.md index f4dc4150e9..6d2e2123a3 100644 --- a/src/doc/book/second-edition/nostarch/chapter01.md +++ b/src/doc/book/second-edition/nostarch/chapter01.md @@ -10,12 +10,12 @@ as programmers from languages like Python who are looking for ways to write more performant code without losing expressiveness. Rust provides the majority of its safety checks at compile time and without a -garbage collector so that your program's runtime isn't impacted. This makes it +garbage collector so that your program’s runtime isn’t impacted. This makes it useful in a number of use cases that other languages aren’t good at: embedding in other languages, programs with specific space and time requirements, and -writing low-level code, like device drivers and operating systems. It's also +writing low-level code, like device drivers and operating systems. It’s also great for web applications: it powers the Rust package registry site, crates.io! -We're excited to see what _you_ create with Rust. +We’re excited to see what _you_ create with Rust. This book is written for a reader who already knows how to program in at least one programming language. After reading this book, you should be comfortable @@ -35,15 +35,15 @@ connection to run the commands in this chapter, as we’ll be downloading Rust from the internet. We’ll be showing off a number of commands using a terminal, and those lines all -start with `$`. You don't need to type in the `$`s; they are there to indicate +start with `$`. You don’t need to type in the `$`s; they are there to indicate the start of each command. You’ll see many tutorials and examples around the web that follow this convention: `$` for commands run as a regular user, and `#` -for commands you should be running as an administrator. Lines that don't start +for commands you should be running as an administrator. Lines that don’t start with `$` are typically showing the output of the previous command. ### Installing on Linux or Mac -If you're on Linux or a Mac, all you need to do is open a terminal and type +If you’re on Linux or a Mac, all you need to do is open a terminal and type this: ```bash @@ -59,7 +59,7 @@ Rust is installed now. Great! 
### Installing on Windows -If you're on Windows, please go to *https://rustup.rs/* and follow +If you’re on Windows, please go to *https://rustup.rs/* and follow the instructions to download rustup-init.exe. Run that and follow the rest of the instructions. @@ -79,7 +79,7 @@ $ rustup self uninstall ### Troubleshooting -If you've got Rust installed, you can open up a shell, and type this: +If you’ve got Rust installed, you can open up a shell, and type this: ```bash $ rustc --version @@ -95,14 +95,14 @@ rustc x.y.z (abcabcabc yyyy-mm-dd) If you see this, Rust has been installed successfully! Congrats! -If you don't and you're on Windows, check that Rust is in your `%PATH%` system +If you don’t and you’re on Windows, check that Rust is in your `%PATH%` system variable. -If it still isn't working, there are a number of places where you can get help. +If it still isn’t working, there are a number of places where you can get help. The easiest is the #rust IRC channel on irc.mozilla.org, which you can access through Mibbit at *http://chat.mibbit.com/?server=irc.mozilla.org&channel=%23rust*. Go to that -address, and you'll be chatting with other Rustaceans (a silly nickname we call +address, and you’ll be chatting with other Rustaceans (a silly nickname we call ourselves) who can help you out. Other great resources include the user’s forum at *https://users.rust-lang.org/* and Stack Overflow at *http://stackoverflow.com/questions/tagged/rust*. @@ -115,9 +115,9 @@ browser. ## Hello, World! -Now that you have Rust installed, let’s write your first Rust program. It's +Now that you have Rust installed, let’s write your first Rust program. It’s traditional when learning a new language to write a little program to print the -text “Hello, world!” to the screen, and in this section, we'll follow that +text “Hello, world!” to the screen, and in this section, we’ll follow that tradition. > Note: This book assumes basic familiarity with the command line. Rust itself @@ -127,8 +127,8 @@ tradition. ### Creating a Project File -First, make a file to put your Rust code in. Rust doesn't care where your code -lives, but for this book, we'd suggest making a *projects* directory in your +First, make a file to put your Rust code in. Rust doesn’t care where your code +lives, but for this book, we’d suggest making a *projects* directory in your home directory and keeping all your projects there. Open a terminal and enter the following commands to make a directory for this particular project: @@ -154,7 +154,7 @@ $ cd hello_world Next, make a new source file and call it `main.rs`. Rust files always end with the `.rs` extension. If you’re using more than one word in your filename, use -an underscore to separate them. For example, you'd use `hello_world.rs` rather +an underscore to separate them. For example, you’d use `hello_world.rs` rather than `helloworld.rs`. Now open the `main.rs` file you just created, and type the following code: @@ -178,13 +178,13 @@ Hello, world! On Windows, just replace `./main` with `.\main.exe`. Regardless of your operating system, you should see the string `Hello, world!` print to the -terminal. If you did, then congratulations! You've officially written a Rust +terminal. If you did, then congratulations! You’ve officially written a Rust program. That makes you a Rust programmer! Welcome. ### Anatomy of a Rust Program -Now, let’s go over what just happened in your "Hello, world!" program in -detail. 
Here's the first piece of the puzzle: +Now, let’s go over what just happened in your “Hello, world!” program in +detail. Here’s the first piece of the puzzle: ```rust fn main() { @@ -192,14 +192,14 @@ fn main() { } ``` -These lines define a *function* in Rust. The `main` function is special: it's +These lines define a *function* in Rust. The `main` function is special: it’s the first thing that is run for every executable Rust program. The first line says, “I’m declaring a function named `main` that takes no arguments and returns nothing.” If there were arguments, they would go inside the parentheses, `(` and `)`. Also note that the function body is wrapped in curly braces, `{` and `}`. Rust -requires these around all function bodies. It's considered good style to put +requires these around all function bodies. It’s considered good style to put the opening curly brace on the same line as the function declaration, with one space in between. @@ -215,7 +215,7 @@ that it’s indented with four spaces, not a tab. The second important part is `println!()`. This is calling a Rust *macro*, which is how metaprogramming is done in Rust. If it were calling a function -instead, it would look like this: `println()` (without the `!`). We'll discuss +instead, it would look like this: `println()` (without the `!`). We’ll discuss Rust macros in more detail in Chapter XX, but for now you just need to know that when you see a `!` that means that you’re calling a macro instead of a normal function. @@ -229,8 +229,8 @@ over, and the next one is ready to begin. Most lines of Rust code end with a ### Compiling and Running Are Separate Steps -In "Writing and Running a Rust Program", we showed you how to run a newly -created program. We'll break that process down and examine each step now. +In “Writing and Running a Rust Program”, we showed you how to run a newly +created program. We’ll break that process down and examine each step now. Before running a Rust program, you have to compile it. You can use the Rust compiler by entering the `rustc` command and passing it the name of your source @@ -240,7 +240,7 @@ file, like this: $ rustc main.rs ``` -If you come from a C or C++ background, you'll notice that this is similar to +If you come from a C or C++ background, you’ll notice that this is similar to `gcc` or `clang`. After compiling successfully, Rust should output a binary executable, which you can see on Linux or OSX by entering the `ls` command in your shell as follows: @@ -250,7 +250,7 @@ $ ls main main.rs ``` -On Windows, you'd enter: +On Windows, you’d enter: ```bash $ dir /B # the /B option says to only show the file names @@ -259,14 +259,14 @@ main.rs ``` This shows we have two files: the source code, with the `.rs` extension, and the -executable (`main.exe` on Windows, `main` everywhere else). All that's left to +executable (`main.exe` on Windows, `main` everywhere else). All that’s left to do from here is run the `main` or `main.exe` file, like this: ```bash $ ./main # or .\main.exe on Windows ``` -If `main.rs` were your "Hello, world!" program, this would print `Hello, +If `main.rs` were your “Hello, world!” program, this would print `Hello, world!` to your terminal. If you come from a dynamic language like Ruby, Python, or JavaScript, you may @@ -279,8 +279,8 @@ hand, they need to have a Ruby, Python, or JavaScript implementation installed program. Everything is a tradeoff in language design. 
Just compiling with `rustc` is fine for simple programs, but as your project -grows, you'll want to be able to manage all of the options your project has -and make it easy to share your code with other people and projects. Next, we'll +grows, you’ll want to be able to manage all of the options your project has +and make it easy to share your code with other people and projects. Next, we’ll introduce you to a tool called Cargo, which will help you write real-world Rust programs. @@ -292,8 +292,8 @@ Cargo takes care of building your code, downloading the libraries your code depends on, and building those libraries. We call libraries your code needs *dependencies*. -The simplest Rust programs, like the one we've written so far, don’t have any -dependencies, so right now, you'd only be using the part of Cargo that can take +The simplest Rust programs, like the one we’ve written so far, don’t have any +dependencies, so right now, you’d only be using the part of Cargo that can take care of building your code. As you write more complex Rust programs, you’ll want to add dependencies, and if you start off using Cargo, that will be a lot easier to do. @@ -314,7 +314,7 @@ installation to determine how to install Cargo separately. ### Creating a Project with Cargo -Let's create a new project using Cargo and look at how it differs from our +Let’s create a new project using Cargo and look at how it differs from our project in `hello_world`. Go back to your projects directory (or wherever you decided to put your code): @@ -339,7 +339,7 @@ $ cd hello_cargo We passed the `--bin` argument to `cargo new` because our goal is to make an executable application, as opposed to a library. Executables are often called -*binaries* (as in `/usr/bin`, if you’re on a Unix system). We've given +*binaries* (as in `/usr/bin`, if you’re on a Unix system). We’ve given `hello_cargo` as the name for our project, and Cargo creates its files in a directory of the same name that we can then go into. @@ -364,7 +364,7 @@ authors = ["Your Name "] [dependencies] ``` -This file is in the *TOML* (Tom's Obvious, Minimal Language) format. TOML is +This file is in the *TOML* (Tom’s Obvious, Minimal Language) format. TOML is similar to INI but has some extra goodies and is used as Cargo’s configuration format. @@ -380,11 +380,11 @@ file. The last line, `[dependencies]`, is the start of a section for you to list any *crates* (which is what we call packages of Rust code) that your project will -depend on so that Cargo knows to download and compile those too. We won't need +depend on so that Cargo knows to download and compile those too. We won’t need any other crates for this project, but we will in the guessing game tutorial in the next chapter. -Now let's look at `src/main.rs`: +Now let’s look at `src/main.rs`: Filename: src/main.rs @@ -394,9 +394,9 @@ fn main() { } ``` -Cargo has generated a "Hello World!" for you, just like the one we wrote +Cargo has generated a “Hello World!” for you, just like the one we wrote earlier! So that part is the same. The differences between our previous project -and the project generated by Cargo that we've seen so far are: +and the project generated by Cargo that we’ve seen so far are: 1. Our code goes in the `src` directory 2. 
The top level contains a `Cargo.toml` configuration file @@ -404,17 +404,17 @@ and the project generated by Cargo that we've seen so far are: Cargo expects your source files to live inside the `src` directory so that the top-level project directory is just for READMEs, license information, configuration files, and anything else not related to your code. In this way, -using Cargo helps you keep your projects nice and tidy. There's a place for +using Cargo helps you keep your projects nice and tidy. There’s a place for everything, and everything is in its place. -If you started a project that doesn't use Cargo, as we did with our project in +If you started a project that doesn’t use Cargo, as we did with our project in the `hello_world` directory, you can convert it to a project that does use Cargo by moving your code into the `src` directory and creating an appropriate `Cargo.toml`. ### Building and Running a Cargo Project -Now let's look at what's different about building and running your Hello World +Now let’s look at what’s different about building and running your Hello World program through Cargo! To do so, enter the following commands: ```bash @@ -444,8 +444,8 @@ version = "0.1.0" ``` Cargo uses the `Cargo.lock` file to keep track of dependencies in your -application. This project doesn't have dependencies, so the file is a bit -sparse. Realistically, you won't ever need to touch this file yourself; just +application. This project doesn’t have dependencies, so the file is a bit +sparse. Realistically, you won’t ever need to touch this file yourself; just let Cargo handle it. We just built a project with `cargo build` and ran it with @@ -458,7 +458,7 @@ $ cargo run Hello, world! ``` -Notice that this time, we didn't see the output telling us that Cargo was +Notice that this time, we didn’t see the output telling us that Cargo was compiling `hello_cargo`. Cargo figured out that the files haven’t changed, so it just ran the binary. If you had modified your source code, Cargo would have rebuilt the project before running it, and you would have seen something like @@ -471,7 +471,7 @@ $ cargo run Hello, world! ``` -So a few more differences we've now seen: +So a few more differences we’ve now seen: 3. Instead of using `rustc`, build a project using `cargo build` (or build and run it in one step with `cargo run`) @@ -479,7 +479,7 @@ So a few more differences we've now seen: code, Cargo will put it in the `target/debug` directory. The other advantage of using Cargo is that the commands are the same no matter -what operating system you're on, so at this point we will no longer be +what operating system you’re on, so at this point we will no longer be providing specific instructions for Linux and Mac versus Windows. ### Building for Release @@ -490,14 +490,14 @@ executable in `target/release` instead of `target/debug`. These optimizations make your Rust code run faster, but turning them on makes your program take longer to compile. This is why there are two different profiles: one for development when you want to be able to rebuild quickly and often, and one for -building the final program you’ll give to a user that won't be rebuilt and -that we want to run as fast as possible. If you're benchmarking the running +building the final program you’ll give to a user that won’t be rebuilt and +that we want to run as fast as possible. If you’re benchmarking the running time of your code, be sure to run `cargo build --release` and benchmark with the executable in `target/release`. 
### Cargo as Convention -With simple projects, Cargo doesn't provide a whole lot of value over just +With simple projects, Cargo doesn’t provide a whole lot of value over just using `rustc`, but it will prove its worth as you continue. With complex projects composed of multiple crates, it’s much easier to let Cargo coordinate the build. With Cargo, you can just run `cargo build`, and it should work the diff --git a/src/doc/book/second-edition/nostarch/chapter02.md b/src/doc/book/second-edition/nostarch/chapter02.md index fb500b2e91..48cb31ff2e 100644 --- a/src/doc/book/second-edition/nostarch/chapter02.md +++ b/src/doc/book/second-edition/nostarch/chapter02.md @@ -399,9 +399,9 @@ the project with the dependencies available. If you immediately run `cargo build` again without making any changes, you won’t get any output. Cargo knows it has already downloaded and compiled the -dependencies, and you haven't changed anything about them in your *Cargo.toml* -file. Cargo also knows that you haven't changed anything about your code, so it -doesn't recompile that either. With nothing to do, it simply exits. If you open +dependencies, and you haven’t changed anything about them in your *Cargo.toml* +file. Cargo also knows that you haven’t changed anything about your code, so it +doesn’t recompile that either. With nothing to do, it simply exits. If you open up the *src/main.rs* file, make a trivial change, then save it and build again, you’ll only see one line of output: @@ -411,7 +411,7 @@ $ cargo build ``` This line shows Cargo only updates the build with your tiny change to the -*src/main.rs* file. Your dependencies haven't changed, so Cargo knows it can +*src/main.rs* file. Your dependencies haven’t changed, so Cargo knows it can reuse what it has already downloaded and compiled for those. It just rebuilds your part of the code. diff --git a/src/doc/book/second-edition/nostarch/chapter03.md b/src/doc/book/second-edition/nostarch/chapter03.md index e9c7d752a1..04fde81d07 100644 --- a/src/doc/book/second-edition/nostarch/chapter03.md +++ b/src/doc/book/second-edition/nostarch/chapter03.md @@ -136,12 +136,12 @@ immutable variables, constants are also values that are bound to a name and are not allowed to change, but there are a few differences between constants and variables. -First, we aren’t allowed to use `mut` with constants: constants aren't only -immutable by default, they're always immutable. +First, we aren’t allowed to use `mut` with constants: constants aren’t only +immutable by default, they’re always immutable. We declare constants using the `const` keyword instead of the `let` keyword, -and the type of the value *must* be annotated. We're about to cover types and -type annotations in the next section, “Data Types,” so don't worry about the +and the type of the value *must* be annotated. We’re about to cover types and +type annotations in the next section, “Data Types,” so don’t worry about the details right now, just know that we must always annotate the type. Constants can be declared in any scope, including the global scope, which makes @@ -151,7 +151,7 @@ The last difference is that constants may only be set to a constant expression, not the result of a function call or any other value that could only be computed at runtime. -Here's an example of a constant declaration where the constant's name is +Here’s an example of a constant declaration where the constant’s name is `MAX_POINTS` and its value is set to 100,000. 
(Rust constant naming convention is to use all upper case with underscores between words): @@ -678,11 +678,11 @@ called and its message is printed. ### Function Parameters Functions can also be defined to have *parameters*, which are special variables -that are part of a function's signature. When a function has parameters, we can +that are part of a function’s signature. When a function has parameters, we can provide it with concrete values for those parameters. Technically, the concrete values are called *arguments*, but in casual conversation people tend to use the words “parameter” and “argument” interchangeably for either the variables -in a function's definition or the concrete values passed in when you call a +in a function’s definition or the concrete values passed in when you call a function. The following rewritten version of `another_function` shows what parameters @@ -737,7 +737,7 @@ fn another_function(x: i32, y: i32) { This example creates a function with two parameters, both of which are `i32` types. The function then prints out the values in both of its parameters. Note -that function parameters don't all need to be the same type, they just happen +that function parameters don’t all need to be the same type, they just happen to be in this example. Let’s try running this code. Replace the program currently in your *function* @@ -1003,7 +1003,7 @@ fn main() { ``` But you’ll more often see them used in this format, with the comment on a -separate line above the code it's annotating: +separate line above the code it’s annotating: Filename: src/main.rs diff --git a/src/doc/book/second-edition/nostarch/chapter04.md b/src/doc/book/second-edition/nostarch/chapter04.md index aa92aa7b7d..da77abb8ec 100644 --- a/src/doc/book/second-edition/nostarch/chapter04.md +++ b/src/doc/book/second-edition/nostarch/chapter04.md @@ -244,7 +244,7 @@ of scope, Rust calls a special function for us. This function is called `drop`, and it’s where the author of `String` can put the code to return the memory. Rust calls `drop` automatically at the closing `}`. -> Note: In C++, this pattern of deallocating resources at the end of an item's +> Note: In C++, this pattern of deallocating resources at the end of an item’s lifetime is sometimes called *Resource Acquisition Is Initialization (RAII)*. The `drop` function in Rust will be familiar to you if you’ve used RAII patterns. @@ -628,7 +628,7 @@ fn calculate_length(s: &String) -> usize { // s is a reference to a String ``` The scope in which the variable `s` is valid is the same as any function -argument's scope, but we don’t drop what the reference points to when it goes +argument’s scope, but we don’t drop what the reference points to when it goes out of scope because we don’t have ownership. Functions that take references as arguments instead of the actual values mean we won’t need to return the values in order to give back ownership, since we never had ownership. diff --git a/src/doc/book/second-edition/nostarch/chapter05.md b/src/doc/book/second-edition/nostarch/chapter05.md index b6f35856b8..0e766dfdf6 100644 --- a/src/doc/book/second-edition/nostarch/chapter05.md +++ b/src/doc/book/second-edition/nostarch/chapter05.md @@ -6,9 +6,9 @@ A *struct*, or *structure*, is a custom data type that lets us name and package together multiple related values that make up a meaningful group. If you’re familiar with an object-oriented language, a *struct* is like an object’s data -attributes. 
In this chapter, we'll compare and contrast tuples with structs, +attributes. In this chapter, we’ll compare and contrast tuples with structs, demonstrate how to use structs, and discuss how to define methods and -associated functions on structs to specify behavior associated with a struct's +associated functions on structs to specify behavior associated with a struct’s data. The struct and *enum* (which is discussed in Chapter 6) concepts are the building blocks for creating new types in your program’s domain to take full advantage of Rust’s compile time type checking. @@ -27,7 +27,7 @@ grouped together. Then, inside curly braces, we define the names and types of the pieces of data, which we call *fields*. For example, Listing 5-1 shows a struct to store information about a user account: -```rust +``` struct User { username: String, email: String, @@ -48,7 +48,7 @@ struct definition is like a general template for the type, and instances fill in that template with particular data to create values of the type. For example, we can declare a particular user as shown in Listing 5-2: -```rust +``` let user1 = User { email: String::from("someone@example.com"), username: String::from("someusername123"), @@ -78,15 +78,13 @@ user1.email = String::from("anotheremail@example.com"); Listing 5-3: Changing the value in the `email` field of a `User` instance -### Field Init Shorthand when Variables Have the Same Name as Fields +Like any expression, we can implicitly return a new instance of a struct from a +function by constructing the new instance as the last expression in the +function body. Listing 5-4 shows a `build_user` function that returns a `User` +instance with the given `email` and `username`. The `active` field gets the +value of `true`, and the `sign_in_count` gets a value of `1`. -If you have variables with the same names as struct fields, you can use *field -init shorthand*. This can make functions that create new instances of structs -more concise. The function named `build_user` shown here in Listing 5-4 has -parameters named `email` and `username`. The function creates and returns a -`User` instance: - -```rust +``` fn build_user(email: String, username: String) -> User { User { email: email, @@ -100,14 +98,26 @@ fn build_user(email: String, username: String) -> User { Listing 5-4: A `build_user` function that takes an email and username and returns a `User` instance -Because the parameter names `email` and `username` are the same as the `User` -struct's field names `email` and `username`, we can write `build_user` without -the repetition of `email` and `username` as shown in Listing 5-5. This version -of `build_user` behaves the same way as the one in Listing 5-4. The field init -syntax can make cases like this shorter to write, especially when structs have -many fields. +Repeating the `email` field name and `email` variable, and the same for +`username`, is a bit tedious, though. It makes sense to name the function +arguments with the same name as the struct fields, but if the struct had more +fields, repeating each name would get even more annoying. Luckily, there's a +convenient shorthand! -```rust +### Field Init Shorthand when Variables Have the Same Name as Fields + +If you have variables with the same names as struct fields, you can use *field +init shorthand*. This can make functions that create new instances of structs +more concise. + +In Listing 5-4, the parameter names `email` and `username` are the same as the +`User` struct’s field names `email` and `username`. 
Because the names are +exactly the same, we can write `build_user` without the repetition of `email` +and `username` as shown in Listing 5-5. This version of `build_user` behaves +the same way as the one in Listing 5-4. The field init syntax can make cases +like this shorter to write, especially when structs have many fields. + +``` fn build_user(email: String, username: String) -> User { User { email, @@ -123,13 +133,13 @@ Listing 5-5: A `build_user` function that uses field init syntax since the ### Creating Instances From Other Instances With Struct Update Syntax -It's often useful to create a new instance from an old instance, using most of -the old instance's values but changing some. Listing 5-6 shows an example of +It’s often useful to create a new instance from an old instance, using most of +the old instance’s values but changing some. Listing 5-6 shows an example of creating a new `User` instance in `user2` by setting the values of `email` and `username` but using the same values for the rest of the fields from the `user1` instance we created in Listing 5-2: -```rust +``` let user2 = User { email: String::from("another@example.com"), username: String::from("anotherusername567"), @@ -141,14 +151,14 @@ let user2 = User { Listing 5-6: Creating a new `User` instance, `user2`, and setting some fields to the values of the same fields from `user1` - The *struct update syntax* achieves the same effect as the code in Listing -5-6 using less code. The struct update syntax uses `..` to specify that the +The *struct update syntax* achieves the same effect as the code in Listing 5-6 +using less code. The struct update syntax uses `..` to specify that the remaining fields not set explicitly should have the same value as the fields in the given instance. The code in Listing 5-7 also creates an instance in `user2` that has a different value for `email` and `username` but has the same values for the `active` and `sign_in_count` fields that `user1` has: -```rust +``` let user2 = User { email: String::from("another@example.com"), username: String::from("anotherusername567"), @@ -163,13 +173,13 @@ the instance in the `user1` variable ### Tuple Structs without Named Fields to Create Different Types We can also define structs that look similar to tuples, called *tuple structs*, -that have the added meaning the struct name provides, but don't have names +that have the added meaning the struct name provides, but don’t have names associated with their fields, just the types of the fields. The definition of a tuple struct still starts with the `struct` keyword and the struct name, which are followed by the types in the tuple. For example, here are definitions and usages of tuple structs named `Color` and `Point`: -```rust +``` struct Color(i32, i32, i32); struct Point(i32, i32, i32); @@ -177,27 +187,27 @@ let black = Color(0, 0, 0); let origin = Point(0, 0, 0); ``` -Note that the `black` and `origin` values are different types, since they're +Note that the `black` and `origin` values are different types, since they’re instances of different tuple structs. Each struct we define is its own type, even though the fields within the struct have the same types. Otherwise, tuple struct instances behave like tuples, which we covered in Chapter 3. ### Unit-Like Structs without Any Fields -We can also define structs that don't have any fields! These are called +We can also define structs that don’t have any fields! These are called *unit-like structs* since they behave similarly to `()`, the unit type. 
Unit-like structs can be useful in situations such as when you need to -implement a trait on some type, but you don't have any data that you want to -store in the type itself. We'll be discussing traits in Chapter 10. +implement a trait on some type, but you don’t have any data that you want to +store in the type itself. We’ll be discussing traits in Chapter 10. PROD: START BOX ### Ownership of Struct Data -In the `User` struct definition in Listing 5-1, we used the owned `String` type -rather than the `&str` string slice type. This is a deliberate choice because -we want instances of this struct to own all of its data and for that data to be -valid for as long as the entire struct is valid. +In the `User` struct definition in Listing 5-1, we used the owned `String` +type rather than the `&str` string slice type. This is a deliberate choice +because we want instances of this struct to own all of its data and for that +data to be valid for as long as the entire struct is valid. It’s possible for structs to store references to data owned by something else, but to do so requires the use of *lifetimes*, a Rust feature that is discussed @@ -207,7 +217,7 @@ struct without specifying lifetimes, like this: Filename: src/main.rs -```rust,ignore +``` struct User { username: &str, email: &str, @@ -227,7 +237,7 @@ fn main() { The compiler will complain that it needs lifetime specifiers: -```text +``` error[E0106]: missing lifetime specifier --> | @@ -241,8 +251,8 @@ error[E0106]: missing lifetime specifier | ^ expected lifetime parameter ``` -We’ll discuss how to fix these errors so you can store references in structs in -Chapter 10, but for now, we’ll fix errors like these using owned types like +We’ll discuss how to fix these errors so you can store references in structs +in Chapter 10, but for now, we’ll fix errors like these using owned types like `String` instead of references like `&str`. PROD: END BOX @@ -260,7 +270,7 @@ just that in our project’s *src/main.rs*: Filename: src/main.rs -```rust +``` fn main() { let length1 = 50; let width1 = 30; @@ -281,7 +291,7 @@ width in separate variables Now, run this program using `cargo run`: -```text +``` The area of the rectangle is 1500 square pixels. ``` @@ -294,7 +304,7 @@ rectangle. The issue with this method is evident in the signature of `area`: -```rust +``` fn area(length: u32, width: u32) -> u32 { ``` @@ -308,7 +318,7 @@ uses tuples: Filename: src/main.rs -```rust +``` fn main() { let rect1 = (50, 30); @@ -346,7 +356,7 @@ parts, as shown in Listing 5-10: Filename: src/main.rs -```rust +``` struct Rectangle { length: u32, width: u32, @@ -381,11 +391,11 @@ using `rect1`, which is the reason we use the `&` in the function signature and where we call the function. The `area` function accesses the `length` and `width` fields of the `Rectangle` -instance. Our function signature for `area` now indicates exactly what we -mean: calculate the area of a `Rectangle` using its `length` and `width` -fields. This conveys that the length and width are related to each other, and -gives descriptive names to the values rather than using the tuple index values -of `0` and `1`—a win for clarity. +instance. Our function signature for `area` now indicates exactly what we mean: +calculate the area of a `Rectangle` using its `length` and `width` fields. This +conveys that the length and width are related to each other, and gives +descriptive names to the values rather than using the tuple index values of `0` +and `1`—a win for clarity. 
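Pulling the pieces of this refactor together, a consolidated sketch of the struct-based version looks roughly like this (the `length` and `width` field names follow the fields used above; the exact layout is a sketch, not a quoted listing):

```
struct Rectangle {
    length: u32,
    width: u32,
}

fn main() {
    let rect1 = Rectangle { length: 50, width: 30 };

    println!(
        "The area of the rectangle is {} square pixels.",
        area(&rect1)
    );
}

// Borrowing the `Rectangle` lets `main` keep ownership of `rect1`.
fn area(rectangle: &Rectangle) -> u32 {
    rectangle.length * rectangle.width
}
```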
### Adding Useful Functionality with Derived Traits @@ -396,7 +406,7 @@ chapters: Filename: src/main.rs -```rust,ignore +``` struct Rectangle { length: u32, width: u32, @@ -413,7 +423,7 @@ Listing 5-11: Attempting to print a `Rectangle` instance When we run this code, we get an error with this core message: -```text +``` error[E0277]: the trait bound `Rectangle: std::fmt::Display` is not satisfied ``` @@ -429,7 +439,7 @@ want and structs don’t have a provided implementation of `Display`. If we continue reading the errors, we’ll find this helpful note: -```text +``` note: `Rectangle` cannot be formatted with the default formatter; try using `:?` instead if you are using a format string ``` @@ -442,13 +452,13 @@ its value while we’re debugging our code. Run the code with this change. Drat! We still get an error: -```text +``` error: the trait bound `Rectangle: std::fmt::Debug` is not satisfied ``` But again, the compiler gives us a helpful note: -```text +``` note: `Rectangle` cannot be formatted using `:?`; if it is defined in your crate, add `#[derive(Debug)]` or manually implement it ``` @@ -460,7 +470,7 @@ definition, as shown in Listing 5-12: Filename: src/main.rs -```rust +``` #[derive(Debug)] struct Rectangle { length: u32, @@ -480,7 +490,7 @@ the `Rectangle` instance using debug formatting Now when we run the program, we won’t get any errors and we’ll see the following output: -```text +``` rect1 is Rectangle { length: 50, width: 30 } ``` @@ -490,7 +500,7 @@ larger structs, it’s useful to have output that’s a bit easier to read; in those cases, we can use `{:#?}` instead of `{:?}` in the `println!` string. When we use the `{:#?}` style in the example, the output will look like this: -```text +``` rect1 is Rectangle { length: 50, width: 30 @@ -504,7 +514,7 @@ with custom behavior as well as how to create your own traits in Chapter 10. Our `area` function is very specific: it only computes the area of rectangles. It would be helpful to tie this behavior more closely to our `Rectangle` -struct, because it won't work with any other type. Let’s look at how we can +struct, because it won’t work with any other type. Let’s look at how we can continue to refactor this code by turning the `area` function into an `area` *method* defined on our `Rectangle` type. @@ -526,7 +536,7 @@ in Listing 5-13: Filename: src/main.rs -```rust +``` #[derive(Debug)] struct Rectangle { length: u32, @@ -601,7 +611,7 @@ Here’s how it works: when you call a method with `object.something()`, Rust automatically adds in `&`, `&mut`, or `*` so `object` matches the signature of the method. In other words, the following are the same: -```rust +``` p1.distance(&p2); (&p1).distance(&p2); ``` @@ -609,9 +619,9 @@ p1.distance(&p2); The first one looks much cleaner. This automatic referencing behavior works because methods have a clear receiver—the type of `self`. Given the receiver and name of a method, Rust can figure out definitively whether the method is -reading (`&self`), mutating (`&mut self`), or consuming (`self`). The fact that -Rust makes borrowing implicit for method receivers is a big part of making -ownership ergonomic in practice. +reading (`&self`), mutating (`&mut self`), or consuming (`self`). The fact +that Rust makes borrowing implicit for method receivers is a big part of +making ownership ergonomic in practice. 
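A small sketch using a hypothetical `Point` type with two `f64` fields (not defined elsewhere in this excerpt) makes the equivalence concrete:

```
struct Point {
    x: f64,
    y: f64,
}

impl Point {
    // `&self` tells Rust this method only reads the receiver.
    fn distance(&self, other: &Point) -> f64 {
        let dx = self.x - other.x;
        let dy = self.y - other.y;
        (dx * dx + dy * dy).sqrt()
    }
}

fn main() {
    let p1 = Point { x: 0.0, y: 0.0 };
    let p2 = Point { x: 3.0, y: 4.0 };

    // These two calls are the same; Rust adds the `&` for us in the first one.
    assert_eq!(p1.distance(&p2), (&p1).distance(&p2));
}
```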
PROD: END BOX @@ -626,7 +636,7 @@ method: Filename: src/main.rs -```rust +``` fn main() { let rect1 = Rectangle { length: 50, width: 30 }; let rect2 = Rectangle { length: 40, width: 10 }; @@ -643,7 +653,7 @@ And the expected output would look like the following, because both dimensions of `rect2` are smaller than the dimensions of `rect1`, but `rect3` is wider than `rect1`: -```text +``` Can rect1 hold rect2? true Can rect1 hold rect3? false ``` @@ -664,7 +674,7 @@ Listing 5-13, shown in Listing 5-15: Filename: src/main.rs -```rust +``` impl Rectangle { fn area(&self) -> u32 { self.length * self.width @@ -701,7 +711,7 @@ specify the same value twice: Filename: src/main.rs -```rust +``` impl Rectangle { fn square(size: u32) -> Rectangle { Rectangle { length: size, width: size } @@ -710,9 +720,35 @@ impl Rectangle { ``` To call this associated function, we use the `::` syntax with the struct name, -like `let sq = Rectangle::square(3);`, for example. This function is namespaced -by the struct: the `::` syntax is used for both associated functions and -namespaces created by modules, which we'll discuss in Chapter 7. +like `let sq = Rectangle::square(3);`, for example. This function is +namespaced by the struct: the `::` syntax is used for both associated functions +and namespaces created by modules, which we’ll discuss in Chapter 7. + +### Multiple `impl` Blocks + +Each struct is allowed to have multiple `impl` blocks. For example, Listing +5-15 is equivalent to the code shown in Listing 5-16, which has each method +in its own `impl` block: + +``` +impl Rectangle { + fn area(&self) -> u32 { + self.length * self.width + } +} + +impl Rectangle { + fn can_hold(&self, other: &Rectangle) -> bool { + self.length > other.length && self.width > other.width + } +} +``` + +Listing 5-16: Rewriting Listing 5-15 using multiple `impl` blocks + +There’s no reason to separate these methods into multiple `impl` blocks here, +but it’s valid syntax. We will see a case when multiple `impl` blocks are useful +in Chapter 10 when we discuss generic types and traits. ## Summary diff --git a/src/doc/book/second-edition/nostarch/chapter06.md b/src/doc/book/second-edition/nostarch/chapter06.md index 914fe3dc36..b553251a61 100644 --- a/src/doc/book/second-edition/nostarch/chapter06.md +++ b/src/doc/book/second-edition/nostarch/chapter06.md @@ -275,7 +275,7 @@ null, has this to say: > I call it my billion-dollar mistake. At that time, I was designing the first > comprehensive type system for references in an object-oriented language. My > goal was to ensure that all use of references should be absolutely safe, with -> checking performed automatically by the compiler. But I couldn't resist the +> checking performed automatically by the compiler. But I couldn’t resist the > temptation to put in a null reference, simply because it was so easy to > implement. This has led to innumerable errors, vulnerabilities, and system > crashes, which have probably caused a billion dollars of pain and damage in @@ -321,7 +321,7 @@ let absent_number: Option = None; ``` If we use `None` rather than `Some`, we need to tell Rust what type of -`Option` we have, because the compiler can't infer the type that the `Some` +`Option` we have, because the compiler can’t infer the type that the `Some` variant will hold by looking only at a `None` value. 
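The contrast is easiest to see side by side; in this sketch the first binding needs no annotation, while the second one does:

```
fn main() {
    // The `5` lets the compiler infer the `Some` variant's type on its own.
    let some_number = Some(5);

    // A bare `None` carries no type information, so we must annotate it.
    let absent_number: Option<i32> = None;
}
```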
When we have a `Some` value, we know that a value is present, and the value is @@ -488,7 +488,7 @@ From 1999 through 2008, the United States printed quarters with different designs for each of the 50 states on one side. No other coins got state designs, so only quarters have this extra value. We can add this information to our `enum` by changing the `Quarter` variant to include a `State` value stored -inside it, which we've done here in Listing 6-4: +inside it, which we’ve done here in Listing 6-4: ```rust #[derive(Debug)] // So we can inspect the state in a minute diff --git a/src/doc/book/second-edition/nostarch/chapter07.md b/src/doc/book/second-edition/nostarch/chapter07.md index 8bc1942a91..5392b4cecc 100644 --- a/src/doc/book/second-edition/nostarch/chapter07.md +++ b/src/doc/book/second-edition/nostarch/chapter07.md @@ -34,8 +34,6 @@ people can pull into their projects as a dependency. For example, the `rand` crate in Chapter 2 is a library crate that we used as a dependency in the guessing game project. -Prod: Check xref - We’ll create a skeleton of a library that provides some general networking functionality; we’ll concentrate on the organization of the modules and functions but we won’t worry about what code goes in the function bodies. We’ll @@ -69,8 +67,6 @@ at the `#[]` and `mod tests` syntax in the “Using `super` to Access a Parent Module” section later in this chapter, but for now, leave this code at the bottom of *src/lib.rs*. -Prod: Check xref - Because we don’t have a *src/main.rs* file, there’s nothing for Cargo to execute with the `cargo run` command. Therefore, we’ll use the `cargo build` command to compile our library crate’s code. @@ -129,7 +125,7 @@ not conflict with each other because they’re in different modules. In this case, because we’re building a library, the file that serves as the entry point for building our library is *src/lib.rs*. However, in respect to creating modules, there’s nothing special about *src/lib.rs*. We could also -create modules in *src/main.rs* for a binary crate in the same way as we're +create modules in *src/main.rs* for a binary crate in the same way as we’re creating modules in *src/lib.rs* for the library crate. In fact, we can put modules inside of modules, which can be useful as your modules grow to keep related functionality organized together and separate functionality apart. The @@ -233,8 +229,8 @@ lines of code inside the functions will start getting lengthy as well. These would be good reasons to separate the `client`, `network`, and `server` modules from *src/lib.rs* and place them into their own files. -First, replace the `client` module code with only the declaration of the -`client` module, so that your *src/lib.rs* looks like the following: +First, replace the `client` module code with only the declaration of the `client` +module, so that your *src/lib.rs* looks like the following: Filename: src/lib.rs @@ -317,8 +313,6 @@ about these warnings for now; we’ll address them in the “Controlling Visibil with `pub`” section later in this chapter. The good news is that they’re just warnings; our project built successfully! -Prod: Check xref - Next, let’s extract the `network` module into its own file using the same pattern. In *src/lib.rs*, delete the body of the `network` module and add a semicolon to the declaration, like so: @@ -632,8 +626,8 @@ error: function `connect` is private ``` Hooray! We have a different error! Yes, different error messages are a cause -for celebration. 
The new error shows `` function `connect` is private ``, so let’s -edit *src/client.rs* to make `client::connect` public too: +for celebration. The new error shows `` function `connect` is private ``, so +let’s edit *src/client.rs* to make `client::connect` public too: Filename: src/client.rs @@ -729,8 +723,8 @@ Only one warning is left! Try to fix this one on your own! Overall, these are the rules for item visibility: 1. If an item is public, it can be accessed through any of its parent modules. -2. If an item is private, it can be accessed only by the current module and its - child modules. +2. If an item is private, it can be accessed only by its immediate parent + module and any of the parent’s child modules. ### Privacy Examples @@ -909,8 +903,8 @@ fn main() { } ``` -We're still specifying the `TrafficLight` namespace for the `Green` variant -because we didn't include `Green` in the `use` statement. +We’re still specifying the `TrafficLight` namespace for the `Green` variant +because we didn’t include `Green` in the `use` statement. ### Glob Imports with `*` @@ -964,8 +958,6 @@ and contains one function named `it_works`. Even though there are special annotations, the `tests` module is just another module! So our module hierarchy looks like this: -Prod: Check xref - ``` communicator ├── client diff --git a/src/doc/book/second-edition/nostarch/chapter08.md b/src/doc/book/second-edition/nostarch/chapter08.md index b8a260cded..1d88f47e1d 100644 --- a/src/doc/book/second-edition/nostarch/chapter08.md +++ b/src/doc/book/second-edition/nostarch/chapter08.md @@ -16,7 +16,7 @@ collections which are used very often in Rust programs: * A *vector* allows us to store a variable number of values next to each other. * A *string* is a collection of characters. We’ve seen the `String` type before, but we’ll talk about it in depth now. -* A *hash map* allows us to associate a value with a particular key. It's a +* A *hash map* allows us to associate a value with a particular key. It’s a particular implementation of the more general data structure called a *map*. To learn about the other kinds of collections provided by the standard library, @@ -408,7 +408,7 @@ fn add(self, s: &str) -> String { This isn’t the exact signature that’s in the standard library; there `add` is defined using generics. Here, we’re looking at the signature of `add` with concrete types substituted for the generic ones, which is what happens when we -call this method with `String` values. We'll be discussing generics in Chapter +call this method with `String` values. We’ll be discussing generics in Chapter 10. This signature gives us the clues we need to understand the tricky bits of the `+` operator. @@ -567,7 +567,7 @@ to the index to determine how many valid characters there were. ### Slicing Strings -Because it's not clear what the return type of string indexing should be, and +Because it’s not clear what the return type of string indexing should be, and it is often a bad idea to index into a string, Rust dissuades you from doing so by asking you to be more specific if you really need it. The way you can be more specific than indexing using `[]` with a single number is using `[]` with @@ -902,7 +902,7 @@ with the drop in performance is worth it. If you profile your code and find that the default hash function is too slow for your purposes, you can switch to another function by specifying a different *hasher*. A hasher is a type that implements the `BuildHasher` trait. 
We’ll be talking about traits and how to -implement them in Chapter 10. You don't necessarily have to implement your own +implement them in Chapter 10. You don’t necessarily have to implement your own hasher from scratch; crates.io has libraries that others have shared that provide hashers implementing many common hashing algorithms. diff --git a/src/doc/book/second-edition/nostarch/chapter09.md b/src/doc/book/second-edition/nostarch/chapter09.md index 363ff26b53..f7921e1c65 100644 --- a/src/doc/book/second-edition/nostarch/chapter09.md +++ b/src/doc/book/second-edition/nostarch/chapter09.md @@ -136,7 +136,7 @@ the `panic!` is actually happening. The next note line tells us that we can set the `RUST_BACKTRACE` environment variable to get a backtrace of exactly what happened to cause the error. Let’s -try that. Listing 9-1 shows output similar to what you'll see: +try that. Listing 9-1 shows output similar to what you’ll see:
diff --git a/src/doc/book/second-edition/nostarch/chapter10.md b/src/doc/book/second-edition/nostarch/chapter10.md index cc8553086a..c1ac9544e4 100644 --- a/src/doc/book/second-edition/nostarch/chapter10.md +++ b/src/doc/book/second-edition/nostarch/chapter10.md @@ -7,40 +7,40 @@ Every programming language has tools to deal effectively with duplication of concepts; in Rust, one of those tools is *generics*. Generics are abstract stand-ins for concrete types or other properties. We can express properties of generics, such as their behavior or how they relate to other generics, without -needing to know when we're writing and compiling the code what will actually be +needing to know when we’re writing and compiling the code what will actually be in their place. -In the same way that a function takes parameters whose value we don't know in +In the same way that a function takes parameters whose value we don’t know in order to write code once that will be run on multiple concrete values, we can write functions that take parameters of some generic type instead of a concrete -type like `i32` or `String`. We've already used generics in Chapter 6 with +type like `i32` or `String`. We’ve already used generics in Chapter 6 with `Option`, Chapter 8 with `Vec` and `HashMap`, and Chapter 9 with -`Result`. In this chapter, we'll explore how to define our own types, +`Result`. In this chapter, we’ll explore how to define our own types, functions, and methods with generics! -First, we're going to review the mechanics of extracting a function that -reduces code duplication. Then we'll use the same mechanics to make a generic +First, we’re going to review the mechanics of extracting a function that +reduces code duplication. Then we’ll use the same mechanics to make a generic function out of two functions that only differ in the types of their -parameters. We'll go over using generic types in struct and enum definitions +parameters. We’ll go over using generic types in struct and enum definitions too. -After that, we'll discuss *traits*, which are a way to define behavior in a +After that, we’ll discuss *traits*, which are a way to define behavior in a generic way. Traits can be combined with generic types in order to constrain a generic type to those types that have a particular behavior, rather than any type at all. -Finally, we'll discuss *lifetimes*, which are a kind of generic that let us +Finally, we’ll discuss *lifetimes*, which are a kind of generic that let us give the compiler information about how references are related to each other. Lifetimes are the feature in Rust that allow us to borrow values in many situations and still have the compiler check that references will be valid. ## Removing Duplication by Extracting a Function -Before getting into generics syntax, let's first review a technique for dealing -with duplication that doesn't use generic types: extracting a function. Once -that's fresh in our minds, we'll use the same mechanics with generics to +Before getting into generics syntax, let’s first review a technique for dealing +with duplication that doesn’t use generic types: extracting a function. Once +that’s fresh in our minds, we’ll use the same mechanics with generics to extract a generic function! In the same way that you recognize duplicated code -to extract into a function, you'll start to recognize duplicated code that can +to extract into a function, you’ll start to recognize duplicated code that can use generics. 
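As a quick reminder of what that extraction looks like before generics enter the picture, here is a compressed sketch of a `largest` function that works only on `i32` slices (a sketch in the spirit of the listings referenced below, not a quoted listing):

```
fn largest(list: &[i32]) -> i32 {
    let mut largest = list[0];

    for &item in list.iter() {
        if item > largest {
            largest = item;
        }
    }

    largest
}

fn main() {
    let number_list = vec![34, 50, 25, 100, 65];
    println!("The largest number is {}", largest(&number_list));
}
```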
Consider a small program that finds the largest number in a list, shown in @@ -123,7 +123,7 @@ to the function in a parameter. This will increase the clarity of our code and let us communicate and reason about the concept of finding the largest number in a list independently of the specific places this concept is used. -In the program in Listing 10-3, we've extracted the code that finds the largest +In the program in Listing 10-3, we’ve extracted the code that finds the largest number into a function named `largest`. This program can find the largest number in two different lists of numbers, but the code from Listing 10-1 only exists in one spot: @@ -182,14 +182,14 @@ ways. What if we had two functions, one that found the largest item in a slice of `i32` values and one that found the largest item in a slice of `char` values? -How would we get rid of that duplication? Let's find out! +How would we get rid of that duplication? Let’s find out! ## Generic Data Types Using generics where we usually place types, like in function signatures or structs, lets us create definitions that we can use for many different concrete -data types. Let's take a look at how to define functions, structs, enums, and -methods using generics, and at the end of this section we'll discuss the +data types. Let’s take a look at how to define functions, structs, enums, and +methods using generics, and at the end of this section we’ll discuss the performance of code using generics. ### Using Generic Data Types in Function Definitions @@ -252,13 +252,13 @@ so it would be nice if we could turn these two functions into one and get rid of the duplication. Luckily, we can do that by introducing a generic type parameter! -To parameterize the types in the signature of the one function we're going to +To parameterize the types in the signature of the one function we’re going to define, we need to create a name for the type parameter, just like how we give -names for the value parameters to a function. We're going to choose the name -`T`. Any identifier can be used as a type parameter name, but we're choosing -`T` because Rust's type naming convention is CamelCase. Generic type parameter +names for the value parameters to a function. We’re going to choose the name +`T`. Any identifier can be used as a type parameter name, but we’re choosing +`T` because Rust’s type naming convention is CamelCase. Generic type parameter names also tend to be short by convention, often just one letter. Short for -"type", `T` is the default choice of most Rust programmers. +“type”, `T` is the default choice of most Rust programmers. When we use a parameter in the body of the function, we have to declare the parameter in the signature so that the compiler knows what that name in the @@ -267,7 +267,7 @@ signature, we have to declare the type parameter name before we use it. Type name declarations go in angle brackets between the name of the function and the parameter list. -The function signature of the generic `largest` function we're going to define +The function signature of the generic `largest` function we’re going to define will look like this: ``` @@ -279,8 +279,8 @@ has one parameter named `list`, and the type of `list` is a slice of values of type `T`. The `largest` function will return a value of the same type `T`. Listing 10-5 shows the unified `largest` function definition using the generic -data type in its signature, and shows how we'll be able to call `largest` with -either a slice of `i32` values or `char` values. 
Note that this code won't +data type in its signature, and shows how we’ll be able to call `largest` with +either a slice of `i32` values or `char` values. Note that this code won’t compile yet! Filename: src/main.rs @@ -312,9 +312,9 @@ fn main() { ``` Listing 10-5: A definition of the `largest` function that uses generic type -parameters but doesn't compile yet +parameters but doesn’t compile yet -If we try to compile this code right now, we'll get this error: +If we try to compile this code right now, we’ll get this error: ``` error[E0369]: binary operation `>` cannot be applied to type `T` @@ -325,14 +325,14 @@ error[E0369]: binary operation `>` cannot be applied to type `T` note: an implementation of `std::cmp::PartialOrd` might be missing for `T` ``` -The note mentions `std::cmp::PartialOrd`, which is a *trait*. We're going to +The note mentions `std::cmp::PartialOrd`, which is a *trait*. We’re going to talk about traits in the next section, but briefly, what this error is saying -is that the body of `largest` won't work for all possible types that `T` could +is that the body of `largest` won’t work for all possible types that `T` could be; since we want to compare values of type `T` in the body, we can only use types that know how to be ordered. The standard library has defined the trait -`std::cmp::PartialOrd` that types can implement to enable comparisons. We'll +`std::cmp::PartialOrd` that types can implement to enable comparisons. We’ll come back to traits and how to specify that a generic type has a particular -trait in the next section, but let's set this example aside for a moment and +trait in the next section, but let’s set this example aside for a moment and explore other places we can use generic type parameters first. ### Using Generic Data Types in Struct Definitions We can define structs to use a generic type parameter in one or more of the -struct's fields with the `<>` syntax too. Listing 10-6 shows the definition and +struct’s fields with the `<>` syntax too. Listing 10-6 shows the definition and use of a `Point` struct that can hold `x` and `y` coordinate values of any type: Filename: src/main.rs @@ -370,11 +370,11 @@ to declare the name of the type parameter within angle brackets just after the name of the struct. Then we can use the generic type in the struct definition where we would specify concrete data types. -Note that because we've only used one generic type in the definition of -`Point`, what we're saying is that the `Point` struct is generic over some type +Note that because we’ve only used one generic type in the definition of +`Point`, what we’re saying is that the `Point` struct is generic over some type `T`, and the fields `x` and `y` are *both* that same type, whatever it ends up being. If we try to create an instance of a `Point` that has values of -different types, as in Listing 10-7, our code won't compile: +different types, as in Listing 10-7, our code won’t compile: Filename: src/main.rs @@ -392,7 +392,7 @@ fn main() { Listing 10-7: The fields `x` and `y` must be the same type because both have the same generic data type `T` -If we try to compile this, we'll get the following error: +If we try to compile this, we’ll get the following error: ``` error[E0308]: mismatched types @@ -413,7 +413,7 @@ type mismatch error. If we wanted to define a `Point` struct where `x` and `y` could have different types but still have those types be generic, we can use multiple generic type -parameters. 
In listing 10-8, we've changed the definition of `Point` to be +parameters. In listing 10-8, we’ve changed the definition of `Point` to be generic over types `T` and `U`. The field `x` is of type `T`, and the field `y` is of type `U`: @@ -438,14 +438,14 @@ values of different types Now all of these instances of `Point` are allowed! You can use as many generic type parameters in a definition as you want, but using more than a few gets hard to read and understand. If you get to a point of needing lots of generic -types, it's probably a sign that your code could use some restructuring to be +types, it’s probably a sign that your code could use some restructuring to be separated into smaller pieces. ### Using Generic Data Types in Enum Definitions Similarly to structs, enums can be defined to hold generic data types in their variants. We used the `Option` enum provided by the standard library in -Chapter 6, and now its definition should make more sense. Let's take another +Chapter 6, and now its definition should make more sense. Let’s take another look: ``` @@ -457,9 +457,9 @@ enum Option { In other words, `Option` is an enum generic in type `T`. It has two variants: `Some`, which holds one value of type `T`, and a `None` variant that -doesn't hold any value. The standard library only has to have this one +doesn’t hold any value. The standard library only has to have this one definition to support the creation of values of this enum that have any -concrete type. The idea of "an optional value" is a more abstract concept than +concrete type. The idea of “an optional value” is a more abstract concept than one specific type, and Rust lets us express this abstract concept without lots of duplication. @@ -491,7 +491,7 @@ definitions to introduce generic types instead. Like we did in Chapter 5, we can implement methods on structs and enums that have generic types in their definitions. Listing 10-9 shows the `Point` -struct we defined in Listing 10-6. We've then defined a method named `x` on +struct we defined in Listing 10-6. We’ve then defined a method named `x` on `Point` that returns a reference to the data in the field `x`: Filename: src/main.rs @@ -519,13 +519,13 @@ Listing 10-9: Implementing a method named `x` on the `Point` struct that will return a reference to the `x` field, which is of type `T`. Note that we have to declare `T` just after `impl`, so that we can use it when -we specify that we're implementing methods on the type `Point`. +we specify that we’re implementing methods on the type `Point`. -Generic type parameters in a struct definition aren't always the same generic -type parameters you want to use in that struct's method signatures. Listing +Generic type parameters in a struct definition aren’t always the same generic +type parameters you want to use in that struct’s method signatures. Listing 10-10 defines a method `mixup` on the `Point` struct from Listing 10-8. The method takes another `Point` as a parameter, which might have different -types than the `self` `Point` that we're calling `mixup` on. The method creates +types than the `self` `Point` that we’re calling `mixup` on. 
The method creates a new `Point` instance that has the `x` value from the `self` `Point` (which is of type `T`) and the `y` value from the passed-in `Point` (which is of type `W`): @@ -557,10 +557,10 @@ fn main() { } ``` -Listing 10-10: Methods that use different generic types than their struct's +Listing 10-10: Methods that use different generic types than their struct’s definition -In `main`, we've defined a `Point` that has an `i32` for `x` (with value `5`) +In `main`, we’ve defined a `Point` that has an `i32` for `x` (with value `5`) and an `f64` for `y` (with value `10.4`). `p2` is a `Point` that has a string slice for `x` (with value `"Hello"`) and a `char` for `y` (with value `c`). Calling `mixup` on `p1` with the argument `p2` gives us `p3`, which will have @@ -573,7 +573,7 @@ declared after `fn mixup`, since they are only relevant to the method. ### Performance of Code Using Generics -You may have been reading this section and wondering if there's a run-time cost +You may have been reading this section and wondering if there’s a run-time cost to using generic type parameters. Good news: the way that Rust has implemented generics means that your code will not run any slower than if you had specified concrete types instead of generic type parameters! @@ -587,7 +587,7 @@ the generic function in Listing 10-5. The compiler looks at all the places that generic code is called and generates code for the concrete types that the generic code is called with. -Let's work through an example that uses the standard library's `Option` enum: +Let’s work through an example that uses the standard library’s `Option` enum: ``` let integer = Some(5); @@ -627,7 +627,7 @@ We can write the non-duplicated code using generics, and Rust will compile that into code that specifies the type in each instance. That means we pay no runtime cost for using generics; when the code runs, it performs just like it would if we had duplicated each particular definition by hand. The process of -monomorphization is what makes Rust's generics extremely efficient at runtime. +monomorphization is what makes Rust’s generics extremely efficient at runtime. ## Traits: Defining Shared Behavior @@ -638,7 +638,7 @@ situations where we use generic type parameters, we can use *trait bounds* to specify, at compile time, that the generic type may be any type that implements a trait and therefore has the behavior we want to use in that situation. -> Note: *Traits* are similar to a feature often called 'interfaces' in other +> Note: *Traits* are similar to a feature often called ‘interfaces’ in other > languages, though with some differences. ### Defining a Trait @@ -656,7 +656,7 @@ tweet. We want to make a media aggregator library that can display summaries of data that might be stored in a `NewsArticle` or `Tweet` instance. The behavior we -need each struct to have is that it's able to be summarized, and that we can +need each struct to have is that it’s able to be summarized, and that we can ask for that summary by calling a `summary` method on an instance. Listing 10-11 shows the definition of a `Summarizable` trait that expresses this concept: @@ -672,7 +672,7 @@ pub trait Summarizable { Listing 10-11: Definition of a `Summarizable` trait that consists of the behavior provided by a `summary` method -We declare a trait with the `trait` keyword, then the trait's name, in this +We declare a trait with the `trait` keyword, then the trait’s name, in this case `Summarizable`. 
Inside curly braces we declare the method signatures that describe the behaviors that types that implement this trait will need to have, in this case `fn summary(&self) -> String`. After the method signature, instead @@ -687,11 +687,11 @@ listed one per line and each line ending in a semicolon. ### Implementing a Trait on a Type -Now that we've defined the `Summarizable` trait, we can implement it on the +Now that we’ve defined the `Summarizable` trait, we can implement it on the types in our media aggregator that we want to have this behavior. Listing 10-12 shows an implementation of the `Summarizable` trait on the `NewsArticle` struct that uses the headline, the author, and the location to create the return value -of `summary`. For the `Tweet` struct, we've chosen to define `summary` as the +of `summary`. For the `Tweet` struct, we’ve chosen to define `summary` as the username followed by the whole text of the tweet, assuming that tweet content is already limited to 140 characters. @@ -728,7 +728,7 @@ impl Summarizable for Tweet { Listing 10-12: Implementing the `Summarizable` trait on the `NewsArticle` and `Tweet` types -Implementing a trait on a type is similar to implementing methods that aren't +Implementing a trait on a type is similar to implementing methods that aren’t related to a trait. The difference is after `impl`, we put the trait name that we want to implement, then say `for` and the name of the type that we want to implement the trait for. Within the `impl` block, we put the method signatures @@ -737,8 +737,8 @@ each signature, we put curly braces and fill in the method body with the specific behavior that we want the methods of the trait to have for the particular type. -Once we've implemented the trait, we can call the methods on instances of -`NewsArticle` and `Tweet` in the same manner that we call methods that aren't +Once we’ve implemented the trait, we can call the methods on instances of +`NewsArticle` and `Tweet` in the same manner that we call methods that aren’t part of a trait: ``` @@ -755,10 +755,10 @@ println!("1 new tweet: {}", tweet.summary()); This will print `1 new tweet: horse_ebooks: of course, as you probably already know, people`. -Note that because we've defined the `Summarizable` trait and the `NewsArticle` -and `Tweet` types all in the same `lib.rs` in Listing 10-12, they're all in the -same scope. If this `lib.rs` is for a crate we've called `aggregator`, and -someone else wants to use our crate's functionality plus implement the +Note that because we’ve defined the `Summarizable` trait and the `NewsArticle` +and `Tweet` types all in the same `lib.rs` in Listing 10-12, they’re all in the +same scope. If this `lib.rs` is for a crate we’ve called `aggregator`, and +someone else wants to use our crate’s functionality plus implement the `Summarizable` trait on their `WeatherForecast` struct, their code would need to import the `Summarizable` trait into their scope first before they could implement it, like in Listing 10-13: @@ -793,25 +793,25 @@ put the `pub` keyword before `trait` in Listing 10-11. One restriction to note with trait implementations: we may implement a trait on a type as long as either the trait or the type are local to our crate. In other -words, we aren't allowed to implement external traits on external types. We -can't implement the `Display` trait on `Vec`, for example, since both `Display` +words, we aren’t allowed to implement external traits on external types. 
We +can’t implement the `Display` trait on `Vec`, for example, since both `Display` and `Vec` are defined in the standard library. We are allowed to implement standard library traits like `Display` on a custom type like `Tweet` as part of our `aggregator` crate functionality. We could also implement `Summarizable` on -`Vec` in our `aggregator` crate, since we've defined `Summarizable` there. This -restriction is part of what's called the *orphan rule*, which you can look up -if you're interested in type theory. Briefly, it's called the orphan rule +`Vec` in our `aggregator` crate, since we’ve defined `Summarizable` there. This +restriction is part of what’s called the *orphan rule*, which you can look up +if you’re interested in type theory. Briefly, it’s called the orphan rule because the parent type is not present. Without this rule, two crates could implement the same trait for the same type, and the two implementations would -conflict: Rust wouldn't know which implementation to use. Because Rust enforces -the orphan rule, other people's code can't break your code and vice versa. +conflict: Rust wouldn’t know which implementation to use. Because Rust enforces +the orphan rule, other people’s code can’t break your code and vice versa. ### Default Implementations -Sometimes it's useful to have default behavior for some or all of the methods +Sometimes it’s useful to have default behavior for some or all of the methods in a trait, instead of making every implementation on every type define custom behavior. When we implement the trait on a particular type, we can choose to -keep or override each method's default behavior. +keep or override each method’s default behavior. Listing 10-14 shows how we could have chosen to specify a default string for the `summary` method of the `Summarize` trait instead of only choosing to only @@ -838,7 +838,7 @@ Listing 10-12, we would specify an empty `impl` block: impl Summarizable for NewsArticle {} ``` -Even though we're no longer choosing to define the `summary` method on +Even though we’re no longer choosing to define the `summary` method on `NewsArticle` directly, since the `summary` method has a default implementation and we specified that `NewsArticle` implements the `Summarizable` trait, we can still call the `summary` method on an instance of `NewsArticle`: @@ -861,11 +861,11 @@ Changing the `Summarizable` trait to have a default implementation for `summary` does not require us to change anything about the implementations of `Summarizable` on `Tweet` in Listing 10-12 or `WeatherForecast` in Listing 10-13: the syntax for overriding a default implementation is exactly the same -as the syntax for implementing a trait method that doesn't have a default +as the syntax for implementing a trait method that doesn’t have a default implementation. Default implementations are allowed to call the other methods in the same -trait, even if those other methods don't have a default implementation. In this +trait, even if those other methods don’t have a default implementation. In this way, a trait can provide a lot of useful functionality and only require implementers to specify a small part of it. 
We could choose to have the `Summarizable` trait also have an `author_summary` method whose implementation @@ -882,7 +882,7 @@ pub trait Summarizable { } ``` -In order to use this version of `Summarizable`, we're only required to define +In order to use this version of `Summarizable`, we’re only required to define `author_summary` when we implement the trait on a type: ``` @@ -895,7 +895,7 @@ impl Summarizable for Tweet { Once we define `author_summary`, we can call `summary` on instances of the `Tweet` struct, and the default implementation of `summary` will call the -definition of `author_summary` that we've provided. +definition of `author_summary` that we’ve provided. ``` let tweet = Tweet { @@ -915,7 +915,7 @@ overridden implementation. ### Trait Bounds -Now that we've defined traits and implemented those traits on types, we can use +Now that we’ve defined traits and implemented those traits on types, we can use traits with generic type parameters. We can constrain generic types so that rather than being any type, the compiler will ensure that the type will be limited to those types that implement a particular trait and thus have the @@ -938,10 +938,10 @@ pub fn notify(item: T) { Trait bounds go with the declaration of the generic type parameter, after a colon and within the angle brackets. Because of the trait bound on `T`, we can call `notify` and pass in any instance of `NewsArticle` or `Tweet`. The -external code from Listing 10-13 that's using our `aggregator` crate can call +external code from Listing 10-13 that’s using our `aggregator` crate can call our `notify` function and pass in an instance of `WeatherForecast`, since `Summarizable` is implemented for `WeatherForecast` as well. Code that calls -`notify` with any other type, like a `String` or an `i32`, won't compile, since +`notify` with any other type, like a `String` or an `i32`, won’t compile, since those types do not implement `Summarizable`. We can specify multiple trait bounds on a generic type by using `+`. If we @@ -952,8 +952,8 @@ and `Display`. For functions that have multiple generic type parameters, each generic has its own trait bounds. Specifying lots of trait bound information in the angle -brackets between a function's name and its parameter list can get hard to read, -so there's an alternate syntax for specifying trait bounds that lets us move +brackets between a function’s name and its parameter list can get hard to read, +so there’s an alternate syntax for specifying trait bounds that lets us move them to a `where` clause after the function signature. So instead of: ``` @@ -969,14 +969,14 @@ fn some_function(t: T, u: U) -> i32 { ``` -This is less cluttered and makes this function's signature look more similar to +This is less cluttered and makes this function’s signature look more similar to a function without lots of trait bounds, in that the function name, parameter list, and return type are close together. ### Fixing the `largest` Function with Trait Bounds So any time you want to use behavior defined by a trait on a generic, you need -to specify that trait in the generic type parameter's type bounds. We can now +to specify that trait in the generic type parameter’s type bounds. We can now fix the definition of the `largest` function that uses a generic type parameter from Listing 10-5! When we set that code aside, we were getting this error: @@ -994,14 +994,14 @@ using the greater-than operator. That operator is defined as a default method on the standard library trait `std::cmp::PartialOrd`. 
So in order to be able to use the greater-than operator, we need to specify `PartialOrd` in the trait bounds for `T` so that the `largest` function will work on slices of any type -that can be compared. We don't need to bring `PartialOrd` into scope because -it's in the prelude. +that can be compared. We don’t need to bring `PartialOrd` into scope because +it’s in the prelude. ``` fn largest(list: &[T]) -> T { ``` -If we try to compile this, we'll get different errors: +If we try to compile this, we’ll get different errors: ``` error[E0508]: cannot move out of type `[T]`, a non-copy array @@ -1027,8 +1027,8 @@ With our non-generic versions of the `largest` function, we were only trying to find the largest `i32` or `char`. As we discussed in Chapter 4, types like `i32` and `char` that have a known size can be stored on the stack, so they implement the `Copy` trait. When we changed the `largest` function to be -generic, it's now possible that the `list` parameter could have types in it -that don't implement the `Copy` trait, which means we wouldn't be able to move +generic, it’s now possible that the `list` parameter could have types in it +that don’t implement the `Copy` trait, which means we wouldn’t be able to move the value out of `list[0]` and into the `largest` variable. If we only want to be able to call this code with types that are `Copy`, we can @@ -1070,34 +1070,34 @@ fn main() { Listing 10-15: A working definition of the `largest` function that works on any generic type that implements the `PartialOrd` and `Copy` traits -If we don't want to restrict our `largest` function to only types that +If we don’t want to restrict our `largest` function to only types that implement the `Copy` trait, we could specify that `T` has the trait bound `Clone` instead of `Copy` and clone each value in the slice when we want the -`largest` function to have ownership. Using the `clone` function means we're +`largest` function to have ownership. Using the `clone` function means we’re potentially making more heap allocations, though, and heap allocations can be -slow if we're working with large amounts of data. Another way we could +slow if we’re working with large amounts of data. Another way we could implement `largest` is for the function to return a reference to a `T` value in the slice. If we change the return type to be `&T` instead of `T` and change -the body of the function to return a reference, we wouldn't need either the -`Clone` or `Copy` trait bounds and we wouldn't be doing any heap allocations. +the body of the function to return a reference, we wouldn’t need either the +`Clone` or `Copy` trait bounds and we wouldn’t be doing any heap allocations. Try implementing these alternate solutions on your own! Traits and trait bounds let us write code that uses generic type parameters in order to reduce duplication, but still specify to the compiler exactly what -behavior our code needs the generic type to have. Because we've given the trait +behavior our code needs the generic type to have. Because we’ve given the trait bound information to the compiler, it can check that all the concrete types used with our code provide the right behavior. In dynamically typed languages, -if we tried to call a method on a type that the type didn't implement, we'd get -an error at runtime. Rust moves these errors to compile time so that we're +if we tried to call a method on a type that the type didn’t implement, we’d get +an error at runtime. 
Rust moves these errors to compile time so that we’re forced to fix the problems before our code is even able to run. Additionally, -we don't have to write code that checks for behavior at runtime since we've +we don’t have to write code that checks for behavior at runtime since we’ve already checked at compile time, which improves performance compared to other languages without having to give up the flexibility of generics. -There's another kind of generics that we've been using without even realizing +There’s another kind of generics that we’ve been using without even realizing it called *lifetimes*. Rather than helping us ensure that a type has the behavior we need it to have, lifetimes help us ensure that references are valid -as long as we need them to be. Let's learn how lifetimes do that. +as long as we need them to be. Let’s learn how lifetimes do that. ## Validating References with Lifetimes @@ -1110,19 +1110,19 @@ of references could be related in a few different ways, so Rust needs us to annotate the relationships using generic lifetime parameters so that it can make sure the actual references used at runtime will definitely be valid. -Yes, it's a bit unusual, and will be different to tools you've used in other -programming languages. Lifetimes are, in some ways, Rust's most distinctive +Yes, it’s a bit unusual, and will be different to tools you’ve used in other +programming languages. Lifetimes are, in some ways, Rust’s most distinctive feature. -Lifetimes are a big topic that can't be covered in entirety in this chapter, so -we'll cover common ways you might encounter lifetime syntax in this chapter to +Lifetimes are a big topic that can’t be covered in entirety in this chapter, so +we’ll cover common ways you might encounter lifetime syntax in this chapter to get you familiar with the concepts. Chapter 19 will contain more advanced information about everything lifetimes can do. ### Lifetimes Prevent Dangling References The main aim of lifetimes is to prevent dangling references, which will cause a -program to reference data other than the data we're intending to reference. +program to reference data other than the data we’re intending to reference. Consider the program in Listing 10-16, with an outer scope and an inner scope. The outer scope declares a variable named `r` with no initial value, and the inner scope declares a variable named `x` with the initial value of 5. Inside @@ -1149,9 +1149,9 @@ Listing 10-16: An attempt to use a reference whose value has gone out of scope > The next few examples declare variables without giving them an initial value, > so that the variable name exists in the outer scope. This might appear to be > in conflict with Rust not having null. However, if we try to use a variable -> before giving it a value, we'll get a compile-time error. Try it out! +> before giving it a value, we’ll get a compile-time error. Try it out! -When we compile this code, we'll get an error: +When we compile this code, we’ll get an error: ``` error: `x` does not live long enough @@ -1165,12 +1165,12 @@ error: `x` does not live long enough | - borrowed value needs to live until here ``` -The variable `x` doesn't "live long enough." Why not? Well, `x` is going to go +The variable `x` doesn’t “live long enough.” Why not? Well, `x` is going to go out of scope when we hit the closing curly brace on line 7, ending the inner scope. But `r` is valid for the outer scope; its scope is larger and we say -that it "lives longer." 
If Rust allowed this code to work, `r` would be +that it “lives longer.” If Rust allowed this code to work, `r` would be referencing memory that was deallocated when `x` went out of scope, and -anything we tried to do with `r` wouldn't work correctly. So how does Rust +anything we tried to do with `r` wouldn’t work correctly. So how does Rust determine that this code should not be allowed? #### The Borrow Checker @@ -1205,7 +1205,7 @@ line and ends with the first closing curly brace on the 7th line. Do you think the text art comments work or should we make an SVG diagram that has nicer looking arrows and labels? /Carol --> -We've annotated the lifetime of `r` with `'a` and the lifetime of `x` with +We’ve annotated the lifetime of `r` with `'a` and the lifetime of `x` with `'b`. As you can see, the inner `'b` block is much smaller than the outer `'a` lifetime block. At compile time, Rust compares the size of the two lifetimes and sees that `r` has a lifetime of `'a`, but that it refers to an object with @@ -1213,7 +1213,7 @@ a lifetime of `'b`. The program is rejected because the lifetime `'b` is shorter than the lifetime of `'a`: the subject of the reference does not live as long as the reference. -Let's look at an example in Listing 10-18 that doesn't try to make a dangling +Let’s look at an example in Listing 10-18 that doesn’t try to make a dangling reference and compiles without any errors: ``` @@ -1234,17 +1234,17 @@ Here, `x` has the lifetime `'b`, which in this case is larger than `'a`. This means `r` can reference `x`: Rust knows that the reference in `r` will always be valid while `x` is valid. -Now that we've shown where the lifetimes of references are in a concrete +Now that we’ve shown where the lifetimes of references are in a concrete example and discussed how Rust analyzes lifetimes to ensure references will -always be valid, let's talk about generic lifetimes of parameters and return +always be valid, let’s talk about generic lifetimes of parameters and return values in the context of functions. ### Generic Lifetimes in Functions -Let's write a function that will return the longest of two string slices. We +Let’s write a function that will return the longest of two string slices. We want to be able to call this function by passing it two string slices, and we want to get back a string slice. The code in Listing 10-19 should print `The -longest string is abcd` once we've implemented the `longest` function: +longest string is abcd` once we’ve implemented the `longest` function: Filename: src/main.rs @@ -1262,7 +1262,7 @@ Listing 10-19: A `main` function that calls the `longest` function to find the longest of two string slices Note that we want the function to take string slices (which are references, as -we talked about in Chapter 4) since we don't want the `longest` function to +we talked about in Chapter 4) since we don’t want the `longest` function to take ownership of its arguments. We want the function to be able to accept slices of a `String` (which is the type of the variable `string1`) as well as string literals (which is what variable `string2` contains). @@ -1284,11 +1284,11 @@ and below). If these topics are confusing you in this context, I'd be interested to know if rereading Chapter 4 clears up that confusion. /Carol --> -Refer back to the "String Slices as Arguments" section of Chapter 4 for more +Refer back to the “String Slices as Arguments” section of Chapter 4 for more discussion about why these are the arguments we want. 
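To see why `&str` parameters give us that flexibility, here is a small sketch of our own (the helper name `shout` is hypothetical, not part of the chapter's listings); a `&String` coerces to a `&str`, and a string literal already is one:

```
fn shout(s: &str) -> String {
    s.to_uppercase()
}

fn main() {
    let owned = String::from("abcd");
    println!("{}", shout(&owned)); // a &String coerces to &str
    println!("{}", shout("efgh")); // a literal is already a &str
}
```
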
If we try to implement the `longest` function as shown in Listing 10-20, it -won't compile: +won’t compile: Filename: src/main.rs @@ -1318,25 +1318,25 @@ error[E0106]: missing lifetime specifier ``` The help text is telling us that the return type needs a generic lifetime -parameter on it because Rust can't tell if the reference being returned refers -to `x` or `y`. Actually, we don't know either, since the `if` block in the body +parameter on it because Rust can’t tell if the reference being returned refers +to `x` or `y`. Actually, we don’t know either, since the `if` block in the body of this function returns a reference to `x` and the `else` block returns a reference to `y`! -As we're defining this function, we don't know the concrete values that will be -passed into this function, so we don't know whether the `if` case or the `else` -case will execute. We also don't know the concrete lifetimes of the references -that will be passed in, so we can't look at the scopes like we did in Listings +As we’re defining this function, we don’t know the concrete values that will be +passed into this function, so we don’t know whether the `if` case or the `else` +case will execute. We also don’t know the concrete lifetimes of the references +that will be passed in, so we can’t look at the scopes like we did in Listings 10-17 and 10-18 in order to determine that the reference we return will always -be valid. The borrow checker can't determine this either, because it doesn't +be valid. The borrow checker can’t determine this either, because it doesn’t know how the lifetimes of `x` and `y` relate to the lifetime of the return -value. We're going to add generic lifetime parameters that will define the +value. We’re going to add generic lifetime parameters that will define the relationship between the references so that the borrow checker can perform its analysis. ### Lifetime Annotation Syntax -Lifetime annotations don't change how long any of the references involved live. +Lifetime annotations don’t change how long any of the references involved live. In the same way that functions can accept any type when the signature specifies a generic type parameter, functions can accept references with any lifetime when the signature specifies a generic lifetime parameter. What lifetime @@ -1347,9 +1347,9 @@ parameters must start with an apostrophe `'`. The names of lifetime parameters are usually all lowercase, and like generic types, their names are usually very short. `'a` is the name most people use as a default. Lifetime parameter annotations go after the `&` of a reference, and a space separates the lifetime -annotation from the reference's type. +annotation from the reference’s type. -Here's some examples: we've got a reference to an `i32` without a lifetime +Here’s some examples: we’ve got a reference to an `i32` without a lifetime parameter, a reference to an `i32` that has a lifetime parameter named `'a`, and a mutable reference to an `i32` that also has the lifetime `'a`: @@ -1359,7 +1359,7 @@ and a mutable reference to an `i32` that also has the lifetime `'a`: &'a mut i32 // a mutable reference with an explicit lifetime ``` -One lifetime annotation by itself doesn't have much meaning: lifetime +One lifetime annotation by itself doesn’t have much meaning: lifetime annotations tell Rust how the generic lifetime parameters of multiple references relate to each other. 
If we have a function with the parameter `first` that is a reference to an `i32` that has the lifetime `'a`, and the @@ -1370,12 +1370,12 @@ as long as the same generic lifetime. ### Lifetime Annotations in Function Signatures -Let's look at lifetime annotations in the context of the `longest` function -we're working on. Just like generic type parameters, generic lifetime +Let’s look at lifetime annotations in the context of the `longest` function +we’re working on. Just like generic type parameters, generic lifetime parameters need to be declared within angle brackets between the function name and the parameter list. The constraint we want to tell Rust about for the references in the parameters and the return value is that they all must have -the same lifetime, which we'll name `'a` and add to each reference as shown in +the same lifetime, which we’ll name `'a` and add to each reference as shown in Listing 10-21: Filename: src/main.rs @@ -1421,11 +1421,11 @@ When concrete references are passed to `longest`, the concrete lifetime that gets substituted for `'a` is the part of the scope of `x` that overlaps with the scope of `y`. Since scopes always nest, another way to say this is that the generic lifetime `'a` will get the concrete lifetime equal to the smaller of -the lifetimes of `x` and `y`. Because we've annotated the returned reference +the lifetimes of `x` and `y`. Because we’ve annotated the returned reference with the same lifetime parameter `'a`, the returned reference will therefore be guaranteed to be valid as long as the shorter of the lifetimes of `x` and `y`. -Let's see how this restricts the usage of the `longest` function by passing in +Let’s see how this restricts the usage of the `longest` function by passing in references that have different concrete lifetimes. Listing 10-22 is a straightforward example that should match your intuition from any language: `string1` is valid until the end of the outer scope, `string2` is valid until @@ -1450,11 +1450,11 @@ fn main() { Listing 10-22: Using the `longest` function with references to `String` values that have different concrete lifetimes -Next, let's try an example that will show that the lifetime of the reference in -`result` must be the smaller lifetime of the two arguments. We'll move the +Next, let’s try an example that will show that the lifetime of the reference in +`result` must be the smaller lifetime of the two arguments. We’ll move the declaration of the `result` variable outside the inner scope, but leave the assignment of the value to the `result` variable inside the scope with -`string2`. Next, we'll move the `println!` that uses `result` outside of the +`string2`. Next, we’ll move the `println!` that uses `result` outside of the inner scope, after it has ended. The code in Listing 10-23 will not compile: Filename: src/main.rs @@ -1472,9 +1472,9 @@ fn main() { ``` Listing 10-23: Attempting to use `result` after `string2` has gone out of scope -won't compile +won’t compile -If we try to compile this, we'll get this error: +If we try to compile this, we’ll get this error: ``` error: `string2` does not live long enough @@ -1496,7 +1496,7 @@ values with the same lifetime parameter, `'a`. We can look at this code as humans and see that `string1` is longer, and therefore `result` will contain a reference to `string1`. Because `string1` has not gone out of scope yet, a reference to `string1` will still be valid for the -`println!`. However, what we've told Rust with the lifetime parameters is that +`println!`. 
However, what we’ve told Rust with the lifetime parameters is that the lifetime of the reference returned by the `longest` function is the same as the smaller of the lifetimes of the references passed in. Therefore, the borrow checker disallows the code in Listing 10-23 as possibly having an invalid @@ -1505,14 +1505,14 @@ reference. Try designing some more experiments that vary the values and lifetimes of the references passed in to the `longest` function and how the returned reference is used. Make hypotheses about whether your experiments will pass the borrow -checker or not before you compile, then check to see if you're right! +checker or not before you compile, then check to see if you’re right! ### Thinking in Terms of Lifetimes The exact way to specify lifetime parameters depends on what your function is doing. For example, if we changed the implementation of the `longest` function to always return the first argument rather than the longest string slice, we -wouldn't need to specify a lifetime on the `y` parameter. This code compiles: +wouldn’t need to specify a lifetime on the `y` parameter. This code compiles: Filename: src/main.rs @@ -1522,7 +1522,7 @@ fn longest<'a>(x: &'a str, y: &str) -> &'a str { } ``` -In this example, we've specified a lifetime parameter `'a` for the parameter +In this example, we’ve specified a lifetime parameter `'a` for the parameter `x` and the return type, but not for the parameter `y`, since the lifetime of `y` does not have any relationship with the lifetime of `x` or the return value. @@ -1532,7 +1532,7 @@ the reference returned does *not* refer to one of the arguments, the only other possibility is that it refers to a value created within this function, which would be a dangling reference since the value will go out of scope at the end of the function. Consider this attempted implementation of the `longest` -function that won't compile: +function that won’t compile: Filename: src/main.rs @@ -1543,9 +1543,9 @@ fn longest<'a>(x: &str, y: &str) -> &'a str { } ``` -Even though we've specified a lifetime parameter `'a` for the return type, this +Even though we’ve specified a lifetime parameter `'a` for the return type, this implementation fails to compile because the return value lifetime is not -related to the lifetime of the parameters at all. Here's the error message we +related to the lifetime of the parameters at all. Here’s the error message we get: ``` @@ -1564,23 +1564,23 @@ at 1:44... ``` The problem is that `result` will go out of scope and get cleaned up at the end -of the `longest` function, and we're trying to return a reference to `result` -from the function. There's no way we can specify lifetime parameters that would -change the dangling reference, and Rust won't let us create a dangling +of the `longest` function, and we’re trying to return a reference to `result` +from the function. There’s no way we can specify lifetime parameters that would +change the dangling reference, and Rust won’t let us create a dangling reference. In this case, the best fix would be to return an owned data type rather than a reference so that the calling function is then responsible for cleaning up the value. Ultimately, lifetime syntax is about connecting the lifetimes of various -arguments and return values of functions. Once they're connected, Rust has +arguments and return values of functions. 
Once they’re connected, Rust has enough information to allow memory-safe operations and disallow operations that would create dangling pointers or otherwise violate memory safety. ### Lifetime Annotations in Struct Definitions -Up until now, we've only defined structs to hold owned types. It is possible +Up until now, we’ve only defined structs to hold owned types. It is possible for structs to hold references, but we need to add a lifetime annotation on -every reference in the struct's definition. Listing 10-24 has a struct named +every reference in the struct’s definition. Listing 10-24 has a struct named `ImportantExcerpt` that holds a string slice: Filename: src/main.rs @@ -1614,9 +1614,9 @@ variable `novel`. ### Lifetime Elision -In this section, we've learned that every reference has a lifetime, and we need +In this section, we’ve learned that every reference has a lifetime, and we need to specify lifetime parameters for functions or structs that use references. -However, in Chapter 4 we had a function in the "String Slices" section, shown +However, in Chapter 4 we had a function in the “String Slices” section, shown again in Listing 10-25, that compiled without lifetime annotations: Filename: src/lib.rs @@ -1639,7 +1639,7 @@ Listing 10-25: A function we defined in Chapter 4 that compiled without lifetime annotations, even though the parameter and return type are references The reason this function compiles without lifetime annotations is historical: -in early versions of pre-1.0 Rust, this indeed wouldn't have compiled. Every +in early versions of pre-1.0 Rust, this indeed wouldn’t have compiled. Every reference needed an explicit lifetime. At that time, the function signature would have been written like this: @@ -1650,22 +1650,22 @@ fn first_word<'a>(s: &'a str) -> &'a str { After writing a lot of Rust code, the Rust team found that Rust programmers were typing the same lifetime annotations over and over in particular situations. These situations were predictable and followed a few deterministic -patterns. The Rust team then programmed these patterns into the Rust compiler's +patterns. The Rust team then programmed these patterns into the Rust compiler’s code so that the borrow checker can infer the lifetimes in these situations without forcing the programmer to explicitly add the annotations. -We mention this piece of Rust history because it's entirely possible that more +We mention this piece of Rust history because it’s entirely possible that more deterministic patterns will emerge and be added to the compiler. In the future, even fewer lifetime annotations might be required. -The patterns programmed into Rust's analysis of references are called the -*lifetime elision rules*. These aren't rules for programmers to follow; the +The patterns programmed into Rust’s analysis of references are called the +*lifetime elision rules*. These aren’t rules for programmers to follow; the rules are a set of particular cases that the compiler will consider, and if -your code fits these cases, you don't need to write the lifetimes explicitly. +your code fits these cases, you don’t need to write the lifetimes explicitly. 
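For example (a sketch of our own, not one of the chapter's listings), these two signatures mean exactly the same thing to the compiler; the first simply leaves the work to the elision rules:

```
// Elided: the compiler fills in the lifetime for us.
fn trimmed(s: &str) -> &str {
    s.trim()
}

// Fully annotated: what the compiler infers for the signature above.
fn trimmed_explicit<'a>(s: &'a str) -> &'a str {
    s.trim()
}
```
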
-The elision rules don't provide full inference: if Rust deterministically -applies the rules but there's still ambiguity as to what lifetimes the -references have, it won't guess what the lifetime of the remaining references +The elision rules don’t provide full inference: if Rust deterministically +applies the rules but there’s still ambiguity as to what lifetimes the +references have, it won’t guess what the lifetime of the remaining references should be. In this case, the compiler will give you an error that can be resolved by adding the lifetime annotations that correspond to your intentions for how the references relate to each other. @@ -1674,10 +1674,10 @@ First, some definitions: Lifetimes on function or method parameters are called *input lifetimes*, and lifetimes on return values are called *output lifetimes*. Now, on to the rules that the compiler uses to figure out what lifetimes -references have when there aren't explicit annotations. The first rule applies +references have when there aren’t explicit annotations. The first rule applies to input lifetimes, and the second two rules apply to output lifetimes. If the compiler gets to the end of the three rules and there are still references that -it can't figure out lifetimes for, the compiler will stop with an error. +it can’t figure out lifetimes for, the compiler will stop with an error. 1. Each parameter that is a reference gets its own lifetime parameter. In other words, a function with one parameter gets one lifetime parameter: `fn @@ -1692,7 +1692,7 @@ it can't figure out lifetimes for, the compiler will stop with an error. assigned to all output lifetime parameters. This makes writing methods much nicer. -Let's pretend we're the compiler and apply these rules to figure out what the +Let’s pretend we’re the compiler and apply these rules to figure out what the lifetimes of the references in the signature of the `first_word` function in Listing 10-25 are. The signatures starts without any lifetimes associated with the references: @@ -1702,7 +1702,7 @@ fn first_word(s: &str) -> &str { ``` Then we (as the compiler) apply the first rule, which says each parameter gets -its own lifetime. We're going to call it `'a` as usual, so now the signature is: +its own lifetime. We’re going to call it `'a` as usual, so now the signature is: ``` fn first_word<'a>(s: &'a str) -> &str { @@ -1720,14 +1720,14 @@ Now all the references in this function signature have lifetimes, and the compiler can continue its analysis without needing the programmer to annotate the lifetimes in this function signature. -Let's do another example, this time with the `longest` function that had no +Let’s do another example, this time with the `longest` function that had no lifetime parameters when we started working with in Listing 10-20: ``` fn longest(x: &str, y: &str) -> &str { ``` -Pretending we're the compiler again, let's apply the first rule: each parameter +Pretending we’re the compiler again, let’s apply the first rule: each parameter gets its own lifetime. This time we have two parameters, so we have two lifetimes: @@ -1735,16 +1735,16 @@ lifetimes: fn longest<'a, 'b>(x: &'a str, y: &'b str) -> &str { ``` -Looking at the second rule, it doesn't apply since there is more than one input +Looking at the second rule, it doesn’t apply since there is more than one input lifetime. Looking at the third rule, this also does not apply because this is a -function rather than a method, so none of the parameters are `self`. 
So we're -out of rules, but we haven't figured out what the return type's lifetime is. +function rather than a method, so none of the parameters are `self`. So we’re +out of rules, but we haven’t figured out what the return type’s lifetime is. This is why we got an error trying to compile the code from Listing 10-20: the -compiler worked through the lifetime elision rules it knows, but still can't +compiler worked through the lifetime elision rules it knows, but still can’t figure out all the lifetimes of the references in the signature. -Because the third rule only really applies in method signatures, let's look at -lifetimes in that context now, and see why the third rule means we don't have +Because the third rule only really applies in method signatures, let’s look at +lifetimes in that context now, and see why the third rule means we don’t have to annotate lifetimes in method signatures very often. ### Lifetime Annotations in Method Definitions @@ -1764,16 +1764,16 @@ lifetime parameter is related to the struct fields or the method arguments and return values. Lifetime names for struct fields always need to be declared after the `impl` -keyword and then used after the struct's name, since those lifetimes are part -of the struct's type. +keyword and then used after the struct’s name, since those lifetimes are part +of the struct’s type. In method signatures inside the `impl` block, references might be tied to the -lifetime of references in the struct's fields, or they might be independent. In +lifetime of references in the struct’s fields, or they might be independent. In addition, the lifetime elision rules often make it so that lifetime annotations -aren't necessary in method signatures. Let's look at some examples using the +aren’t necessary in method signatures. Let’s look at some examples using the struct named `ImportantExcerpt` that we defined in Listing 10-24. -First, here's a method named `level`. The only parameter is a reference to +First, here’s a method named `level`. The only parameter is a reference to `self`, and the return value is just an `i32`, not a reference to anything: ``` @@ -1785,10 +1785,10 @@ impl<'a> ImportantExcerpt<'a> { ``` The lifetime parameter declaration after `impl` and use after the type name is -required, but we're not required to annotate the lifetime of the reference to +required, but we’re not required to annotate the lifetime of the reference to `self` because of the first elision rule. -Here's an example where the third lifetime elision rule applies: +Here’s an example where the third lifetime elision rule applies: ``` impl<'a> ImportantExcerpt<'a> { @@ -1831,7 +1831,7 @@ is fixing those problems, not specifying the `'static` lifetime. ### Generic Type Parameters, Trait Bounds, and Lifetimes Together -Let's briefly look at the syntax of specifying generic type parameters, trait +Let’s briefly look at the syntax of specifying generic type parameters, trait bounds, and lifetimes all in one function! ``` @@ -1862,18 +1862,18 @@ the function name. ## Summary We covered a lot in this chapter! Now that you know about generic type -parameters, traits and trait bounds, and generic lifetime parameters, you're -ready to write code that isn't duplicated but can be used in many different +parameters, traits and trait bounds, and generic lifetime parameters, you’re +ready to write code that isn’t duplicated but can be used in many different situations. Generic type parameters mean the code can be applied to different types. 
Traits and trait bounds ensure that even though the types are generic, those types will have the behavior the code needs. Relationships between the lifetimes of references specified by lifetime annotations ensure that this -flexible code won't have any dangling references. And all of this happens at -compile time so that run-time performance isn't affected! +flexible code won’t have any dangling references. And all of this happens at +compile time so that run-time performance isn’t affected! -Believe it or not, there's even more to learn in these areas: Chapter 17 will +Believe it or not, there’s even more to learn in these areas: Chapter 17 will discuss trait objects, which are another way to use traits. Chapter 19 will be covering more complex scenarios involving lifetime annotations. Chapter 20 will -get to some advanced type system features. Up next, though, let's talk about +get to some advanced type system features. Up next, though, let’s talk about how to write tests in Rust so that we can make sure our code using all these features is working the way we want it to! diff --git a/src/doc/book/second-edition/nostarch/chapter11.md b/src/doc/book/second-edition/nostarch/chapter11.md index 459fe0ef1d..b1209cf0f9 100644 --- a/src/doc/book/second-edition/nostarch/chapter11.md +++ b/src/doc/book/second-edition/nostarch/chapter11.md @@ -37,12 +37,11 @@ tests and integration tests. ## How to Write Tests -Tests are Rust functions that verify that the non-test code in the program is -functioning in the expected manner. The bodies of test functions typically run -some setup code, then run the code we want to test, then assert whether the -results are what we expect. Let’s look at the features Rust provides -specifically for writing tests: the `test` attribute, a few macros, and the -`should_panic` attribute. +Tests are Rust functions that verify that the non-test code is functioning in +the expected manner. The bodies of test functions typically perform some setup, +run the code we want to test, then assert whether the results are what we +expect. Let’s look at the features Rust provides specifically for writing +tests: the `test` attribute, a few macros, and the `should_panic` attribute. ### The Anatomy of a Test Function @@ -56,7 +55,7 @@ on whether each test function passes or fails. We saw in Chapter 7 that when you make a new library project with Cargo, a test module with a test function in it is automatically generated for us. This is to -help us get started writing our tests, since we don’t have to go look up the +help us get started writing our tests so we don’t have to go look up the exact structure and syntax of test functions every time we start a new project. We can add as many additional test functions and as many test modules as we want, though! @@ -171,9 +170,11 @@ test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured ``` Let’s add another test, but this time we’ll make a test that fails! Tests fail -when something in the test function panics. We talked about the simplest way to -cause a panic in Chapter 9: call the `panic!` macro! Type in the new test so -that your `src/lib.rs` now looks like Listing 11-3: +when something in the test function panics. Each test is run in a new thread, +and when the main thread sees that a test thread has died, the test is marked +as failed. We talked about the simplest way to cause a panic in Chapter 9: call +the `panic!` macro! 
Type in the new test so that your `src/lib.rs` now looks +like Listing 11-3: Filename: src/lib.rs @@ -483,7 +484,7 @@ functions that assert two values are equal are called `expected` and `actual` and the order in which we specify the arguments matters. However, in Rust, they’re called `left` and `right` instead, and the order in which we specify the value we expect and the value that the code under test produces doesn’t -matter. We could have written the assertion in this test as +matter. We could write the assertion in this test as `assert_eq!(add_two(2), 4)`, which would result in a failure message that says `` assertion failed: `(left == right)` (left: `5`, right: `4`) ``. @@ -596,7 +597,7 @@ Now if we run the test again, we’ll get a much more informative error message: ``` ---- tests::greeting_contains_name stdout ---- - thread 'tests::greeting_contains_name' panicked at 'Greeting did not contain + thread 'tests::greeting_contains_name' panicked at 'Greeting did not contain name, value was `Hello`', src/lib.rs:12 note: Run with `RUST_BACKTRACE=1` for a backtrace. ``` @@ -616,7 +617,7 @@ outside that range panics. We can do this by adding another attribute, `should_panic`, to our test function. This attribute makes a test pass if the code inside the function -panics, and the test will fail if the code inside the function doesn't panic. +panics, and the test will fail if the code inside the function doesn’t panic. Listing 11-8 shows how we’d write a test that checks the error conditions of `Guess::new` happen when we expect: @@ -955,7 +956,7 @@ function and see what the output looks like then! Sometimes, running a full test suite can take a long time. If you’re working on code in a particular area, you might want to run only the tests pertaining to that code. You can choose which tests to run by passing `cargo test` the name -or names of the test/s you want to run as an argument. +or names of the test(s) you want to run as an argument. To demonstrate how to run a subset of tests, we’ll create three tests for our `add_two` function as shown in Listing 11-11 and choose which ones to run: @@ -1117,8 +1118,8 @@ tests are entirely external to your library, and use your code in the same way any other external code would, using only the public interface and exercising multiple modules per test. -Both kinds of tests are important to ensure that the pieces of your library are -doing what you expect them to separately and together. +Writing both kinds of tests is important to ensure that the pieces of your +library are doing what you expect them to separately and together. ### Unit Tests diff --git a/src/doc/book/second-edition/nostarch/chapter12.md b/src/doc/book/second-edition/nostarch/chapter12.md index 7ca8e763d4..7c07ecb917 100644 --- a/src/doc/book/second-edition/nostarch/chapter12.md +++ b/src/doc/book/second-edition/nostarch/chapter12.md @@ -11,16 +11,16 @@ practice some of the Rust you now have under your belt. Rust’s speed, safety, *single binary* output, and cross-platform support make it a good language for creating command line tools, so for our project we’ll make our own version of the classic command line tool `grep`. Grep is an -acronym for “Globally search a Regular Expression and Print.” In the simplest -use case, `grep` searches a specified file for a specified string. To do so, -`grep` takes a filename and a string as its arguments, then reads the file and -finds lines in that file that contain the string argument. It’ll then print out -those lines. 
+acronym for “**G**lobally search a **R**egular **E**xpression and **P**rint.” +In the simplest use case, `grep` searches a specified file for a specified +string. To do so, `grep` takes a filename and a string as its arguments, then +reads the file and finds lines in that file that contain the string argument. +It’ll then print out those lines. Along the way, we’ll show how to make our command line tool use features of the -terminal that many command line tools use. We'll read the value of an +terminal that many command line tools use. We’ll read the value of an environment variable in order to allow the user to configure the behavior of -our tool. We'll print to the standard error console stream (`stderr`) instead +our tool. We’ll print to the standard error console stream (`stderr`) instead of standard output (`stdout`) so that, for example, the user can choose to redirect successful output to a file while still seeing error messages on the screen. @@ -42,6 +42,8 @@ This project will bring together a number of concepts you’ve learned so far: We’ll also briefly introduce closures, iterators, and trait objects, which Chapters 13 and 17 will cover in detail. +## Accepting Command Line Arguments + Let’s create a new project with, as always, `cargo new`. We’re calling our project `minigrep` to distinguish from the `grep` tool that you may already have on your system: @@ -52,8 +54,6 @@ $ cargo new --bin minigrep $ cd minigrep ``` -## Accepting Command Line Arguments - Our first task is to make `minigrep` able to accept its two command line arguments: the filename and a string to search for. That is, we want to be able to run our program with `cargo run`, a string to search for, and a path to a @@ -139,18 +139,18 @@ $ cargo run needle haystack You may notice that the first value in the vector is `"target/debug/minigrep"`, which is the name of our binary. This matches the behavior of the arguments list in C, and lets programs use the name by which they were invoked in their -execution. It's convenient to have access to the program name in case we want +execution. It’s convenient to have access to the program name in case we want to print it in messages or change behavior of the program based on what command line alias was used to invoke the program, but for the purposes of this chapter -we're going to ignore it and only save the two arguments we need. +we’re going to ignore it and only save the two arguments we need. ### Saving the Argument Values in Variables Printing out the value of the vector of arguments has illustrated that the -program is able to access the values specified as command line arguments. -That’s not actually our end goal, though: we want to save the values of the two -arguments in variables so that we can use the values in our program. Let’s do -that as shown in Listing 12-2: +program is able to access the values specified as command line arguments. Now +we need to save the values of the two arguments in variables so that we can use +the values throughout the rest of the program. Let’s do that as shown in +Listing 12-2: Filename: src/main.rs @@ -235,6 +235,7 @@ use std::io::prelude::*; fn main() { // ...snip... println!("In file {}", filename); + let mut f = File::open(filename).expect("file not found"); let mut contents = String::new(); @@ -365,9 +366,11 @@ it. Let’s re-work our program by following this process. #### Extracting the Argument Parser -First, we’ll extract the functionality for parsing arguments. 
Listing 12-5 -shows the new start of `main` that calls a new function `parse_config`, which -we’re still going to define in *src/main.rs* for the moment: +First, we’ll extract the functionality for parsing arguments into a function +that `main` will call to prepare for moving the command line parsing logic to +*src/lib.rs*. Listing 12-5 shows the new start of `main` that calls a new +function `parse_config`, which we’re still going to define in *src/main.rs* for +the moment: Filename: src/main.rs @@ -657,7 +660,7 @@ We’ve made two changes in the body of the `new` function: instead of calling value, and we’ve wrapped the `Config` return value in an `Ok`. These changes make the function conform to its new type signature. -By having `Config::new` return an `Err` value, it allows the `main` function to +Returning an `Err` value from `Config::new` allows the `main` function to handle the `Result` value returned from the `new` function and exit the process more cleanly in the error case. @@ -668,7 +671,7 @@ update `main` to handle the `Result` being returned by `Config::new`, as shown in Listing 12-10. We’re also going to take the responsibility of exiting the command line tool with a nonzero error code from `panic!` and implement it by hand. A nonzero exit status is a convention to signal to the process that -called our program that our program ended with an error state. +called our program that our program exited with an error state. Filename: src/main.rs @@ -706,8 +709,8 @@ code in the closure that will get run in the error case is only two lines: we print out the `err` value, then call `process::exit`. The `process::exit` function will stop the program immediately and return the number that was passed as the exit status code. This is similar to the `panic!`-based handling -we used in Listing 12-8, with the exception that we no longer get all the extra -output. Let’s try it: +we used in Listing 12-8, but we no longer get all the extra output. Let’s try +it: ``` $ cargo run @@ -771,9 +774,9 @@ With the remaining program logic separated into the `run` function, we can improve the error handling like we did with `Config::new` in Listing 12-9. Instead of allowing the program to panic by calling `expect`, the `run` function will return a `Result` when something goes wrong. This will let -us further consolidate the logic around handling errors in a user-friendly way -into `main`. Listing 12-12 shows the changes you need to make to the signature -and body of `run`: +us further consolidate into `main` the logic around handling errors in a +user-friendly way. Listing 12-12 shows the changes you need to make to the +signature and body of `run`: Filename: src/main.rs @@ -837,7 +840,7 @@ to have some error handling code here! Let’s rectify that now. #### Handling Errors Returned from `run` in `main` -We’ll check for errors and handle them using a similar technique to the way we +We’ll check for errors and handle them using a technique similar to the way we handled errors with `Config::new` in Listing 12-10, but with a slight difference: @@ -874,7 +877,7 @@ This is looking pretty good so far! Now we’re going to split the *src/main.rs* file up and put some code into *src/lib.rs* so that we can test it and have a *src/main.rs* file with fewer responsibilities. 
-Let’s move everything that isn't the `main` function from *src/main.rs* to a +Let’s move everything that isn’t the `main` function from *src/main.rs* to a new file, *src/lib.rs*: * The `run` function definition @@ -883,7 +886,7 @@ new file, *src/lib.rs*: * The `Config::new` function definition The contents of *src/lib.rs* should have the signatures shown in Listing 12-13 -(we've omitted the bodies of the functions for brevity): +(we’ve omitted the bodies of the functions for brevity): Filename: src/lib.rs @@ -939,9 +942,9 @@ fn main() { Listing 12-14: Bringing the `minigrep` crate into the scope of *src/main.rs* -To bring the library crate into the binary crate, we use `extern crate` -`minigrep`. Then we’ll add a `use` `minigrep``::Config` line to bring the -`Config` type into scope, and we'll prefix the `run` function with our crate +To bring the library crate into the binary crate, we use `extern crate +minigrep`. Then we’ll add a `use minigrep::Config` line to bring the +`Config` type into scope, and we’ll prefix the `run` function with our crate name. With that, all the functionality should be connected and should work. Give it a `cargo run` and make sure everything is wired up correctly. @@ -953,23 +956,25 @@ Let’s take advantage of this newfound modularity by doing something that would have been hard with our old code, but is easy with our new code: write some tests! -## Testing the Library’s Functionality +## Developing the Library’s Functionality with Test Driven Development -Now that we’ve extracted the logic into *src/lib.rs* and left all the argument -parsing and error handling in *src/main.rs*, it’s much easier for us to write -tests for the core functionality of our code. We can call our functions +Now that we’ve extracted the logic into *src/lib.rs* and left the argument +collecting and error handling in *src/main.rs*, it’s much easier for us to +write tests for the core functionality of our code. We can call our functions directly with various arguments and check return values without having to call -our binary from the command line. +our binary from the command line. Feel free to write some tests for the +functionality in the `Config::new` and `run` functions on your own if you’d +like. -In this section, we’re going to follow the Test Driven Development (TDD) -process. This is a software development technique that follows this set of -steps: +In this section, we’re going to move on to adding the searching logic of +`minigrep` by following the Test Driven Development (TDD) process. This is a +software development technique that follows this set of steps: * Write a test that fails, and run it to make sure it fails for the reason you -expected. + expected. * Write or modify just enough code to make the new test pass. * Refactor the code you just added or changed, and make sure the tests continue -to pass. + to pass. * Repeat! This is just one of many ways to write software, but TDD can help drive the @@ -1020,7 +1025,7 @@ searching is three lines, only one of which contains “duct”. We assert that value returned from the `search` function contains only the line we expect. We aren’t able to run this test and watch it fail though, since this test -doesn’t even compile–the search function doesn't exist yet! So now we’ll add +doesn’t even compile–the search function doesn’t exist yet! So now we’ll add just enough code to get the tests to compile and run: a definition of the `search` function that always returns an empty vector, as shown in Listing 12-16. 
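The exact code is in Listing 12-16; the shape of such a stub is roughly this (a sketch, with the output lifetime tied to `contents`, since that is the slice the returned lines will eventually borrow from):

```
pub fn search<'a>(query: &str, contents: &'a str) -> Vec<&'a str> {
    // Deliberately incomplete: the parameters are unused and we always
    // return an empty vector, which is just enough to compile and run.
    vec![]
}
```
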
Once we have this, the test should compile and fail because an empty @@ -1088,7 +1093,7 @@ test test::one_result ... FAILED failures: ---- test::one_result stdout ---- - thread 'test::one_result' panicked at 'assertion failed: `(left == right)` + thread 'test::one_result' panicked at 'assertion failed: `(left == right)` (left: `["safe, fast, productive."]`, right: `[]`)', src/lib.rs:16 note: Run with `RUST_BACKTRACE=1` for a backtrace. @@ -1142,8 +1147,8 @@ each item in a collection. Next, we’ll add functionality to check if the current line contains the query string. Luckily, strings have another helpful method named `contains` that does -this for us! Add the `contains` method to the `search` function as shown in -Listing 12-18: +this for us! Add a call to the `contains` method in the `search` function as +shown in Listing 12-18: Filename: src/lib.rs @@ -1230,7 +1235,7 @@ pub fn run(config: Config) -> Result<(), Box> { ``` We’re still using a `for` loop to get each line returned from `search` and -printing out each line. +print it out. Now our whole program should be working! Let’s try it out, first with a word that should return exactly one line from the Emily Dickinson poem, “frog”: @@ -1333,7 +1338,7 @@ Trust me."; Listing 12-20: Adding a new failing test for the case insensitive function we’re about to add -Note that we’ve edited the old test’s `contents` too. We've added a new line +Note that we’ve edited the old test’s `contents` too. We’ve added a new line with the text “Duct tape”, with a capital D, that shouldn’t match the query “duct” when we’re searching in a case sensitive manner. Changing the old test in this way helps ensure that we don’t accidentally break the case sensitive @@ -1536,46 +1541,48 @@ variables! Some programs allow both arguments *and* environment variables for the same configuration. In those cases, the programs decide that one or the other takes precedence. For another exercise on your own, try controlling case -insensitivity through a command line argument as well as through the -environment variable, and decide which should take precedence if the program is -run with contradictory values. +insensitivity through either a command line argument or an environment +variable. Decide whether the command line argument or the environment variable +should take precedence if the program is run with one set to case sensitive and +one set to case insensitive. The `std::env` module contains many more useful features for dealing with environment variables; check out its documentation to see what’s available. -## Writing Error Messages to `stderr` Instead of `stdout` +## Writing Error Messages to Standard Error Instead of Standard Output At the moment we’re writing all of our output to the terminal with the -`println!` function. Most terminals provide two kinds of output: *standard out* -for general information, and *standard error* for error messages. This -distinction enables users to choose whether to direct a the successful output -of a program to a file but still print error messages to the screen. +`println!` function. Most terminals provide two kinds of output: *standard +output* for general information (sometimes abbreviated as `stdout` in code), +and *standard error* for error messages (`stderr`). This distinction enables +users to choose whether to direct a the successful output of a program to a +file but still print error messages to the screen. 
-The `println!` function is only capable of printing to standard out, though, so -we have to use something else in order to print to standard error. +The `println!` function is only capable of printing to standard output, though, +so we have to use something else in order to print to standard error. ### Checking Where Errors are Written to First, let’s observe how all content printed by `minigrep` is currently being -written to standard out, including error messages that we want to write to +written to standard output, including error messages that we want to write to standard error instead. We’ll do that by redirecting the standard output stream -to a file while we also intentionally cause an error. We won't redirect the +to a file while we also intentionally cause an error. We won’t redirect the standard error stream, so any content sent to standard error will continue to -display on the screen. Command line programs are expected to send error +display on the screen. Command line programs are expected to send error messages to the standard error stream so that we can still see error messages on the screen even if we choose to redirect the standard output stream to a -file. Our program is not currently well-behaved; we're about to see that it +file. Our program is not currently well-behaved; we’re about to see that it saves the error message output to the file instead! The way to demonstrate this behavior is by running the program with `>` and the filename, *output.txt*, that we want to redirect the standard output stream to. -We're not going to pass any arguments, which should cause an error: +We’re not going to pass any arguments, which should cause an error: ``` $ cargo run > output.txt ``` -The `>` syntax tells the shell to write the contents of standard out to +The `>` syntax tells the shell to write the contents of standard output to *output.txt* instead of the screen. We didn’t see the error message we were expecting printed on the screen, so that means it must have ended up in the file. Let’s see what *output.txt* contains: @@ -1584,10 +1591,10 @@ file. Let’s see what *output.txt* contains: Problem parsing arguments: not enough arguments ``` -Yup, our error message is being printed to standard out. It’s much more useful -for error messages like this to be printed to standard error, and have only -data from a successful run end up in the file when we redirect standard out in -this way. We’ll change that. +Yup, our error message is being printed to standard output. It’s much more +useful for error messages like this to be printed to standard error, and have +only data from a successful run end up in the file when we redirect standard +output in this way. We’ll change that. ### Printing Errors to Standard Error @@ -1595,7 +1602,7 @@ Let’s change how error messages are printed using the code in Listing 12-24. Because of the refactoring we did earlier in this chapter, all the code that prints error messages is in one function, in `main`. 
The standard library provides the `eprintln!` macro that prints to the standard error stream, so -let's change the two places we were calling `println!` to print errors so that +let’s change the two places we were calling `println!` to print errors so that these spots use `eprintln!` instead: Filename: src/main.rs @@ -1617,11 +1624,11 @@ fn main() { } ``` -Listing 12-24: Writing error messages to `stderr` instead of `stdout` using -`eprintln!` +Listing 12-24: Writing error messages to standard error instead of standard +output using `eprintln!` After changing `println!` to `eprintln!`, let’s try running the program again -in the same way, without any arguments and redirecting `stdout` with `>`: +in the same way, without any arguments and redirecting standard output with `>`: ``` $ cargo run > output.txt @@ -1632,7 +1639,7 @@ Now we see our error on the screen and `output.txt` contains nothing, which is the behavior expected of command line programs. If we run the program again with arguments that don’t cause an error, but still -redirect standard out to a file: +redirect standard output to a file: ``` $ cargo run to poem.txt > output.txt @@ -1648,19 +1655,18 @@ Are you nobody, too? How dreary to be somebody! ``` -This demonstrates that we’re now using `stdout` for successful output and -`stderr` for error output as appropriate. +This demonstrates that we’re now using standard output for successful output and +standard error for error output as appropriate. ## Summary In this chapter, we’ve recapped on some of the major concepts so far and covered how to do common I/O operations in a Rust context. By using command -line arguments, files, environment variables, and the `eprintln!` macro with -`stderr`, you’re now prepared to write command line applications. By using the -concepts from previous chapters, your code will be well-organized, be able to -store data effectively in the appropriate data structures, handle errors -nicely, and be well tested. +line arguments, files, environment variables, and the `eprintln!` macro for +printing errors, you’re now prepared to write command line applications. By +using the concepts from previous chapters, your code will be well-organized, be +able to store data effectively in the appropriate data structures, handle +errors nicely, and be well tested. Next, let’s explore some functional-language influenced Rust features: closures and iterators. - diff --git a/src/doc/book/second-edition/nostarch/chapter13.md b/src/doc/book/second-edition/nostarch/chapter13.md index 6a96d9c26a..283bd6ff90 100644 --- a/src/doc/book/second-edition/nostarch/chapter13.md +++ b/src/doc/book/second-edition/nostarch/chapter13.md @@ -1,3 +1,4 @@ + [TOC] # Functional Language features in Rust: Iterators and Closures @@ -7,27 +8,27 @@ this chapter. Do you have a suggestion on how to make that clearer than the text in the intro paragraph here? /Carol --> -Rust's design has taken inspiration from a lot of existing languages and +Rust’s design has taken inspiration from a lot of existing languages and techniques, and one significant influence is *functional programming*. Programming in a functional style often includes using functions as values in arguments or return values of other functions, assigning functions to variables -for later execution, and so forth. We won't debate here the issue of what, +for later execution, and so forth. 
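To give a concrete taste of that style (our own minimal sketch, ahead of the chapter's proper examples), here is a named function being stored in a variable and then passed as a value to another function:

```
fn add_one(x: i32) -> i32 {
    x + 1
}

fn main() {
    // Store the function in a variable, then pass it to `map` as a value.
    let f = add_one;
    let incremented: Vec<i32> = vec![1, 2, 3].into_iter().map(f).collect();
    assert_eq!(incremented, vec![2, 3, 4]);
}
```
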
We won’t debate here the issue of what, exactly, functional programming is or is not, but will instead show off some features of Rust that are similar to features in many languages often referred to as functional. -More specifically, we're going to cover: +More specifically, we’re going to cover: * *Closures*: a function-like construct you can store in a variable. * *Iterators*: a way of processing a series of elements. * How to use these features to improve on the I/O project from Chapter 12. -* The performance of these features. Spoiler alert: they're faster than you +* The performance of these features. Spoiler alert: they’re faster than you might think! There are other Rust features influenced by the functional style, like pattern -matching and enums, that we've covered in other chapters as well. Mastering +matching and enums, that we’ve covered in other chapters as well. Mastering closures and iterators is an important part of writing idiomatic, fast Rust -code, so we're devoting an entire chapter to them here. +code, so we’re devoting an entire chapter to them here. ## Closures: Anonymous Functions that can Capture their Environment @@ -37,11 +38,11 @@ don't think this is quite right, is there a shorter heading we could use to capture what a closure is/is for? --> -Rust's *closures* are anonymous functions that you can save in a variable or +Rust’s *closures* are anonymous functions that you can save in a variable or pass as arguments to other functions. You can create the closure in one place, and then call the closure to evaluate it in a different context. Unlike functions, closures are allowed to capture values from the scope in which they -are called. We're going to demonstrate how these features of closures allow for +are called. We’re going to demonstrate how these features of closures allow for code reuse and customization of behavior. ### Creating an Abstraction of Behavior Using a Closure -Let's work on an example that will show a situation where storing a closure to -be executed at a later time is useful. We'll talk about the syntax of closures, +Let’s work on an example that will show a situation where storing a closure to +be executed at a later time is useful. We’ll talk about the syntax of closures, type inference, and traits along the way. -The hypothetical situation is this: we're working at a startup that's making an +The hypothetical situation is this: we’re working at a startup that’s making an app to generate custom exercise workout plans. The backend is written in Rust, and the algorithm that generates the workout plan takes into account many -different factors like the app user's age, their Body Mass Index, their +different factors like the app user’s age, their Body Mass Index, their preferences, their recent workouts, and an intensity number they specify. The -actual algorithm used isn't important in this example; what's important is that +actual algorithm used isn’t important in this example; what’s important is that this calculation takes a few seconds. We only want to call this algorithm if we -need to, and we only want to call it once, so that we aren't making the user -wait more than they need to. We're going to simulate calling this hypothetical +need to, and we only want to call it once, so that we aren’t making the user +wait more than they need to. 
We’re going to simulate calling this hypothetical algorithm by calling the `simulated_expensive_calculation` function shown in Listing 13-1 instead, which will print `calculating slowly...`, wait for two seconds, and then return whatever number we passed in: Filename: src/main.rs -```rust +``` use std::thread; use std::time::Duration; fn simulated_expensive_calculation(intensity: i32) -> i32 { println!("calculating slowly..."); - thread::sleep(Duration::new(2, 0)); + thread::sleep(Duration::from_secs(2)); intensity } ``` -Listing 13-1: A function we'll use to stand in for a hypothetical calculation +Listing 13-1: A function we’ll use to stand in for a hypothetical calculation that takes about two seconds to run Next, we have a `main` function that contains the parts of the workout app that are important for this example. This represents the code that the app would call when a user asks for a workout plan. Because the interaction with the -app's frontend isn't relevant to the use of closures, we're going to hardcode +app’s frontend isn’t relevant to the use of closures, we’re going to hardcode values representing inputs to our program and print the outputs. The inputs to the program are: - An `intensity` number from the user, specified when they request a workout, - so they can indicate whether they'd like a low intensity workout or a high + so they can indicate whether they’d like a low intensity workout or a high intensity workout - A random number that will generate some variety in the workout plans The output the program prints will be the recommended workout plan. -Listing 13-2 shows the `main` function we're going to use. We've hardcoded the +Listing 13-2 shows the `main` function we’re going to use. We’ve hardcoded the variable `simulated_user_specified_value` to 10 and the variable -`simulated_random_number` to 7 for simplicity's sake; in an actual program we'd -get the intensity number from the app frontend and we'd use the `rand` crate to +`simulated_random_number` to 7 for simplicity’s sake; in an actual program we’d +get the intensity number from the app frontend and we’d use the `rand` crate to generate a random number like we did in the Guessing Game example in Chapter 2. The `main` function calls a `generate_workout` function with the simulated input values: Filename: src/main.rs -```rust -# use std::thread; -# use std::time::Duration; -# -# fn simulated_expensive_calculation(num: i32) -> i32 { -# println!("calculating slowly..."); -# thread::sleep(Duration::new(2, 0)); -# num -# } -# +``` fn main() { let simulated_user_specified_value = 10; let simulated_random_number = 7; generate_workout(simulated_user_specified_value, simulated_random_number); } -# fn generate_workout(intensity: i32, random_number: i32) {} ``` Listing 13-2: A `main` function containing hardcoded values to simulate user input and random number generation inputs to the `generate_workout` function -That's the context of what we're working on. The `generate_workout` function in -Listing 13-3 contains the business logic of the app that we're most concerned +That’s the context of what we’re working on. The `generate_workout` function in +Listing 13-3 contains the business logic of the app that we’re most concerned with in this example. 
The rest of the code changes in this example will be made to this function: Filename: src/main.rs -```rust -# use std::thread; -# use std::time::Duration; -# -# fn simulated_expensive_calculation(num: i32) -> i32 { -# println!("calculating slowly..."); -# thread::sleep(Duration::new(2, 0)); -# num -# } -# +``` fn generate_workout(intensity: i32, random_number: i32) { if intensity < 25 { println!( @@ -183,7 +165,7 @@ based on the inputs and calls to the `simulated_expensive_calculation` function The code in Listing 13-3 has multiple calls to the slow calculation function. The first `if` block calls `simulated_expensive_calculation` twice, the `if` -inside the outer `else` doesn't call it at all, and the code inside the `else` +inside the outer `else` doesn’t call it at all, and the code inside the `else` case inside the outer `else` calls it once. @@ -191,44 +173,35 @@ case inside the outer `else` calls it once. The desired behavior of the `generate_workout` function is to first check if the user wants a low intensity workout (indicated by a number less than 25) or a high intensity workout (25 or more). Low intensity workout plans will -recommend a number of pushups and situps based on the complex algorithm we're +recommend a number of pushups and situps based on the complex algorithm we’re simulating with the `simulated_expensive_calculation` function, which needs the intensity number as an input. -If the user wants a high intensity workout, there's some additional logic: if +If the user wants a high intensity workout, there’s some additional logic: if the value of the random number generated by the app happens to be 3, the app will recommend a break and hydration instead. If not, the user will get a high intensity workout of a number of minutes of running that comes from the complex algorithm. The data science team has let us know that there are going to be some changes -to the way we have to call the algorithm, so we want to refactor this code to -have only one place that calls the `simulated_expensive_calculation` function -to update when those changes happen. We also want to get rid of the spot where -we're currently calling the function twice unnecessarily, and we don't want to -add any other calls to that function in the process. That is, we don't want to -call it if we're in the case where the result isn't needed at all, and we still -want to call it only once in the last case. - -There are many ways we could restructure this program. The way we're going to +to the way we have to call the algorithm. To simplify the update when those +changes happen, we would like to refactor this code to have only a single call +to the `simulated_expensive_calculation` function. We also want to get rid of +the spot where we’re currently calling the function twice unnecessarily, and +we don’t want to add any other calls to that function in the process. That is, +we don’t want to call it if we’re in the case where the result isn’t needed at +all, and we still want to call it only once in the last case. + +There are many ways we could restructure this program. 
The way we’re going to try first is extracting the duplicated call to the expensive calculation function into a variable, as shown in Listing 13-4: Filename: src/main.rs -```rust -# use std::thread; -# use std::time::Duration; -# -# fn simulated_expensive_calculation(num: i32) -> i32 { -# println!("calculating slowly..."); -# thread::sleep(Duration::new(2, 0)); -# num -# } -# +``` fn generate_workout(intensity: i32, random_number: i32) { let expensive_result = - simulated_expensive_calculation(simulated_user_specified_value); + simulated_expensive_calculation(intensity); if intensity < 25 { println!( @@ -260,8 +233,8 @@ variable This change unifies all the calls to `simulated_expensive_calculation` and solves the problem of the first `if` block calling the function twice -unnecessarily. Unfortunately, we're now calling this function and waiting for -the result in all cases, which includes the inner `if` block that doesn't use +unnecessarily. Unfortunately, we’re now calling this function and waiting for +the result in all cases, which includes the inner `if` block that doesn’t use the result value at all. We want to be able to specify some code in one place in our program, but then @@ -272,25 +245,22 @@ our program. This is a use case for closures! Instead of always calling the `simulated_expensive_calculation` function before the `if` blocks, we can define a closure and store the closure in a variable -instead of the result as shown in Listing 13-5: +instead of the result as shown in Listing 13-5. We can actually choose to move +the whole body of `simulated_expensive_calculation` within the closure we’re +introducing here: Filename: src/main.rs -```rust -# use std::thread; -# use std::time::Duration; -# -# fn simulated_expensive_calculation(num: i32) -> i32 { -# println!("calculating slowly..."); -# thread::sleep(Duration::new(2, 0)); -# num -# } -# -let expensive_closure = |num| simulated_expensive_calculation(num); +``` +let expensive_closure = |num| { + println!("calculating slowly..."); + thread::sleep(Duration::from_secs(2)); + num +}; ``` -Listing 13-5: Defining a closure that will call the expensive function and -store the closure in the `expensive_closure` variable +Listing 13-5: Defining a closure with the body that was in the expensive +function and store the closure in the `expensive_closure` variable -The closure definition is the part after the `=` that we're assigning to the +The closure definition is the part after the `=` that we’re assigning to the variable `expensive_closure`. To define a closure, we start with a pair of vertical pipes (`|`). Inside the pipes is where we specify the parameters to the closure; this syntax was chosen because of its similarity to closure @@ -306,40 +276,16 @@ definitions in Smalltalk and Ruby. This closure has one parameter named `num`; if we had more than one parameter, we would separate them with commas, like `|param1, param2|`. -After the parameters, we define the body of the closure. This closure has only -one line in its body, `simulated_expensive_calculation(num)`. If we had more -than one line, we must surround the body of the closure with curly braces. 
We -can choose to do that with only one line in the body, which we can do on the -same line as teh assignment, or we can insert more whitespace as shown in these -two variations: - -Filename: src/main.rs - -```rust -# use std::thread; -# use std::time::Duration; -# -# fn simulated_expensive_calculation(num: i32) -> i32 { -# println!("calculating slowly..."); -# thread::sleep(Duration::new(2, 0)); -# num -# } -# -let expensive_closure = |num| { simulated_expensive_calculation(num) }; - -let expensive_closure = |num| { - simulated_expensive_calculation(num) -}; -``` - -The semicolon at the end goes with the `let` statement. The value returned from -the call to `simulated_expensive_caluclation(num)`, since it's the last line in -the closure body and that line doesn't end in a semicolon, will be the value -returned from the closure when it's called, just like in function bodies. +After the parameters, we put curly braces that hold the body of the closure. +The curly braces are optional if the closure body only has one line. After the +curly braces, we need a semicolon to go with the `let` statement. The value +returned from the last line in the closure body (`num`), since that line +doesn’t end in a semicolon, will be the value returned from the closure when +it’s called, just like in function bodies. Note that this `let` statement means `expensive_closure` contains the *definition* of an anonymous function, not the *resulting value* of calling the -anonymous function. Recall the reason we're using a closure is because we want +anonymous function. Recall the reason we’re using a closure is because we want to define the code to call at one point, store that code, and actually call it at a later point; the code we want to call is now stored in `expensive_closure`. @@ -352,18 +298,13 @@ containing the argument values we want to use for that call as shown in Listing Filename: src/main.rs -```rust -# use std::thread; -# use std::time::Duration; -# -# fn simulated_expensive_calculation(num: i32) -> i32 { -# println!("calculating slowly..."); -# thread::sleep(Duration::new(2, 0)); -# num -# } -# +``` fn generate_workout(intensity: i32, random_number: i32) { - let expensive_closure = |num| simulated_expensive_calculation(num); + let expensive_closure = |num| { + println!("calculating slowly..."); + thread::sleep(Duration::from_secs(2)); + num + }; if intensity < 25 { println!( @@ -387,39 +328,39 @@ fn generate_workout(intensity: i32, random_number: i32) { } ``` -Listing 13-6: Calling the `expensive_closure` we've defined +Listing 13-6: Calling the `expensive_closure` we’ve defined -Now we've achieved the goal of unifying where `simulated_expensive_calculation` -is called to one place, and we're only executing that code where we need the -results. However, we've reintroduced one of the problems from Listing 13-3: -we're still calling the closure twice in the first `if` block, which will call -the expensive function twice and make the user wait twice as long as they need -to. We could fix this problem by creating a variable local to that `if` block -to hold the result of calling the closure, but there's another solution we can -use since we have a closure. We'll get back to that solution in a bit; let's -first talk about why there aren't type annotations in the closure definition -and the traits involved with closures. +Now we’ve achieved the goal of unifying where the expensive calculation is +called to one place, and we’re only executing that code where we need the +results. 
However, we’ve reintroduced one of the problems from Listing 13-3: +we’re still calling the closure twice in the first `if` block, which will call +the expensive code twice and make the user wait twice as long as they need to. +We could fix this problem by creating a variable local to that `if` block to +hold the result of calling the closure, but there’s another solution we can use +since we have a closure. We’ll get back to that solution in a bit; let’s first +talk about why there aren’t type annotations in the closure definition and the +traits involved with closures. ### Closure Type Inference and Annotation -Closure are different than functions defined with the `fn` keyword in a few -ways. The first is that closures don't require you to annotate the types of the +Closures differ from functions defined with the `fn` keyword in a few +ways. The first is that closures don’t require you to annotate the types of the parameters or the return value like `fn` functions do. -This is because functions are part of an explicit interface exposed to your -users, so defining this interface rigidly is important for ensuring that -everyone agrees on what types of values a function uses and returns. Closures -aren't used in an exposed interface like this, though: they're stored in -variables and used without naming them and exposing them to be invoked by users -of our library. +Type annotations are required on functions because they are part of an +explicit interface exposed to your users. Defining this interface rigidly is +important for ensuring that everyone agrees on what types of values a function +uses and returns. Closures aren’t used in an exposed interface like this, +though: they’re stored in variables and used without naming them and exposing +them to be invoked by users of our library. Additionally, closures are usually short and only relevant within a narrow context rather than in any arbitrary scenario. Within these limited contexts, the compiler is reliably able to infer the types of the parameters and return -type similarly to how it's able to infer the types of most variables. Being +type similarly to how it’s able to infer the types of most variables. Being forced to annotate the types in these small, anonymous functions would be annoying and largely redundant with the information the compiler already has available. @@ -442,18 +383,11 @@ would look like the definition shown here in Listing 13-7: Filename: src/main.rs -```rust -# use std::thread; -# use std::time::Duration; -# -# fn simulated_expensive_calculation(num: i32) -> i32 { -# println!("calculating slowly..."); -# thread::sleep(Duration::new(2, 0)); -# num -# } -# +``` let expensive_closure = |num: i32| -> i32 { - simulated_expensive_calculation(num) + println!("calculating slowly..."); + thread::sleep(Duration::from_secs(2)); + num }; ``` @@ -468,12 +402,11 @@ thing as the functions? --> The syntax of closures and functions looks more similar with type annotations. -Here's a vertical comparison of the syntax for function definitions and the -syntax for closure definitions that all perform the same task as the closure -from Listing 13-4 (we've added some spaces here to line up the relevant parts). -This illustrates how closure syntax is similar to function syntax except for -the use of pipes rather than parentheses and the amount of syntax that is -optional: +Here’s a vertical comparison of the syntax for the definition of a function +that adds one to its parameter, and a closure that has the same behavior. 
We’ve +added some spaces here to line up the relevant parts). This illustrates how +closure syntax is similar to function syntax except for the use of pipes rather +than parentheses and the amount of syntax that is optional: -```rust,ignore -fn expensive_function (num: i32) -> i32 { simulated_expensive_calculation(num) } -let expensive_closure = |num: i32| -> i32 { simulated_expensive_calculation(num) }; -let expensive_closure = |num| { simulated_expensive_calculation(num) }; -let expensive_closure = |num| simulated_expensive_calculation(num) ; +``` +fn add_one_v1 (x: i32) -> i32 { x + 1 } +let add_one_v2 = |x: i32| -> i32 { x + 1 }; +let add_one_v3 = |x| { x + 1 }; +let add_one_v4 = |x| x + 1 ; ``` @@ -512,26 +445,26 @@ confusing and convey our point better. /Carol --> Closure definitions will have one concrete type inferred for each of their parameters and for their return value. For instance, Listing 13-8 shows the definition of a short closure that just returns the value it gets as a -parameter. This closure isn't very useful except for the purposes of this -example. Note that we haven't added any type annotations to the definition: if +parameter. This closure isn’t very useful except for the purposes of this +example. Note that we haven’t added any type annotations to the definition: if we then try to call the closure twice, using a `String` as an argument the -first time and an `i32` the second time, we'll get an error: +first time and an `i32` the second time, we’ll get an error: Filename: src/main.rs -```rust,ignore +``` let example_closure = |x| x; let s = example_closure(String::from("hello")); let n = example_closure(5); ``` -Listing 13-8: Attempting to call a closure whose types are inferred with two -different types +Listing 13-8: Attempting to call a closure whose types +are inferred with two different types The compiler gives us this error: -```text +``` error[E0308]: mismatched types --> src/main.rs | @@ -555,29 +488,31 @@ error if we try to use a different type with the same closure. Returning to our workout generation app, in Listing 13-6 we left our code still calling the expensive calculation closure more times than it needs to. In each place throughout our code, if we need the results of the expensive closure more -than once, we could save the result in a variable for reuse and use the variable -instead of calling the closure again. This could be a lot of repeated code saving -the results in a variety of places. +than once, we could save the result in a variable for reuse and use the +variable instead of calling the closure again. This could be a lot of repeated +code saving the results in a variety of places. However, because we have a closure for the expensive calculation, we have another solution available to us. We can create a struct that will hold the closure and the resulting value of calling the closure. The struct will only execute the closure if we need the resulting value, and it will cache the -resulting value so that the rest of our code doesn't have to be responsible for +resulting value so that the rest of our code doesn’t have to be responsible for saving and reusing the result. You may know this pattern as *memoization* or *lazy evaluation*. In order to make a struct that holds a closure, we need to be able to specify the type of the closure. Each closure instance has its own unique anonymous -type, so in order to define structs, enums, or function parameters that use -closures, we use generics and trait bounds like we discussed in Chapter 10. 
+type: that is, even if two closures have the same signature, their types are
+still considered to be different. In order to define structs, enums, or
+function parameters that use closures, we use generics and trait bounds like we
+discussed in Chapter 10.
 
 The `Fn` traits are provided by the standard library. All closures implement
-one of the traits `Fn`, `FnMut`, or `FnOnce`. We'll discuss the difference
+one of the traits `Fn`, `FnMut`, or `FnOnce`. We’ll discuss the difference
 between these traits in the next section on capturing the environment; in this
 example, we can use the `Fn` trait.
 
@@ -591,7 +526,7 @@ and an optional result value:
 
 Filename: src/main.rs
 
-```rust
+```
 struct Cacher<T>
     where T: Fn(i32) -> i32
 {
@@ -611,24 +546,17 @@ return an `i32` (specified after the `->`).
 
 The `value` field is of type `Option<i32>`. Before we execute the closure,
 `value` will be `None`. If the code using a `Cacher` asks for the result of the
-closure, we'll execute the closure at that time and store the result within a
+closure, we’ll execute the closure at that time and store the result within a
 `Some` variant in the `value` field. Then if the code asks for the result of
-the closure again, instead of executing the closure again, we'll return the
-result that we're holding in the `Some` variant.
+the closure again, instead of executing the closure again, we’ll return the
+result that we’re holding in the `Some` variant.
 
-The logic around the `value` field that we've just described is defined in
+The logic around the `value` field that we’ve just described is defined in
 Listing 13-10:
 
 Filename: src/main.rs
 
-```rust
-# struct Cacher<T>
-#     where T: Fn(i32) -> i32
-# {
-#     calculation: T,
-#     value: Option<i32>,
-# }
-#
+```
 impl<T> Cacher<T>
     where T: Fn(i32) -> i32
 {
@@ -665,14 +593,14 @@ Listing 13-10: Implementations on `Cacher` of an associated function named
 
 The fields on the `Cacher` struct are private since we want `Cacher` to manage
 their values rather than letting the calling code potentially change the values
 in these fields directly. The `Cacher::new` function takes a generic parameter
-`T`, which we've defined in the context of the `impl` block to have the same
+`T`, which we’ve defined in the context of the `impl` block to have the same
 trait bound as the `Cacher` struct. `Cacher::new` returns a `Cacher` instance
 that holds the closure specified in the `calculation` field and a `None` value
-in the `value` field, since we haven't executed the closure yet.
+in the `value` field, since we haven’t executed the closure yet.
 
 When the calling code wants the result of evaluating the closure, instead of
 calling the closure directly, it will call the `value` method. This method
-checks to see if we alreaday have a resulting value in `self.value` in a `Some`;
+checks to see if we already have a resulting value in `self.value` in a `Some`;
 if we do, it returns the value within the `Some` without executing the closure
 again.
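The hunks above show only fragments of Listings 13-9 and 13-10, so here is a
minimal, self-contained sketch of the caching behavior just described: a struct
that holds the closure, plus a `value` method that runs the closure at most
once. The `main` function and the `|n| n + 1` closure are illustrative
additions for this sketch, not part of the book's listings:

```
struct Cacher<T>
    where T: Fn(i32) -> i32
{
    calculation: T,
    value: Option<i32>,
}

impl<T> Cacher<T>
    where T: Fn(i32) -> i32
{
    fn new(calculation: T) -> Cacher<T> {
        // No result has been computed yet, so `value` starts out as `None`.
        Cacher { calculation, value: None }
    }

    fn value(&mut self, arg: i32) -> i32 {
        match self.value {
            // A cached result exists: return it without running the closure.
            Some(v) => v,
            // First request: run the closure, cache the result, and return it.
            None => {
                let v = (self.calculation)(arg);
                self.value = Some(v);
                v
            },
        }
    }
}

fn main() {
    // Hypothetical closure standing in for the expensive calculation.
    let mut cacher = Cacher::new(|n| n + 1);

    // The closure body runs only on the first call; the second call is served
    // from the cached value.
    assert_eq!(cacher.value(5), 6);
    assert_eq!(cacher.value(5), 6);
}
```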
@@ -684,48 +612,12 @@ Listing 13-11 shows how we can use this `Cacher` struct in the Filename: src/main.rs -```rust -# use std::thread; -# use std::time::Duration; -# -# fn simulated_expensive_calculation(num: i32) -> i32 { -# println!("calculating slowly..."); -# thread::sleep(Duration::new(2, 0)); -# num -# } -# -# struct Cacher -# where T: Fn(i32) -> i32 -# { -# calculation: T, -# value: Option, -# } -# -# impl Cacher -# where T: Fn(i32) -> i32 -# { -# fn new(calculation: T) -> Cacher { -# Cacher { -# calculation, -# value: None, -# } -# } -# -# fn value(&mut self, arg: i32) -> i32 { -# match self.value { -# Some(v) => v, -# None => { -# let v = (self.calculation)(arg); -# self.value = Some(v); -# v -# }, -# } -# } -# } -# +``` fn generate_workout(intensity: i32, random_number: i32) { - let mut expensive_result = Cacher::new(|arg| { - simulated_expensive_calculation(arg) + let mut expensive_result = Cacher::new(|num| { + println!("calculating slowly..."); + thread::sleep(Duration::from_secs(2)); + num }); if intensity < 25 { @@ -755,20 +647,19 @@ away the caching logic -Instead of saving the closure that calls the expensive calculation in a -variable directly, we save a new instance of `Cacher` that holds the closure. -Then, in each place we want the result, we call the `value` method on the -`Cacher` instance. We can call the `value` method as many times as we want, or -not call it at all, and the expensive calculation will be run a maximum of -once. Try running this program with the `main` function from Listing 13-2, and -change the values in the `simulated_user_specified_value` and -`simulated_random_number` variables to verify that in all of the cases in the -various `if` and `else` blocks, `calculating slowly...` printed by the -`simulated_expensive_calculation` function only shows up once and only when +Instead of saving the closure in a variable directly, we save a new instance of +`Cacher` that holds the closure. Then, in each place we want the result, we +call the `value` method on the `Cacher` instance. We can call the `value` +method as many times as we want, or not call it at all, and the expensive +calculation will be run a maximum of once. Try running this program with the +`main` function from Listing 13-2, and change the values in the +`simulated_user_specified_value` and `simulated_random_number` variables to +verify that in all of the cases in the various `if` and `else` blocks, +`calculating slowly...` printed by the closure only shows up once and only when needed. -The `Cacher` takes care of the logic necessary to ensure we aren't calling the -expensive calculation more than we need to be so that `generate_workout` can +The `Cacher` takes care of the logic necessary to ensure we aren’t calling the +expensive calculation more than we need to, so that `generate_workout` can focus on the business logic. Caching values is a more generally useful behavior that we might want to use in other parts of our code with other closures as well. However, there are a few problems with the current implementation of @@ -778,9 +669,9 @@ The first problem is a `Cacher` instance assumes it will always get the same value for the parameter `arg` to the `value` method. That is, this test of `Cacher` will fail: -```rust,ignore +``` #[test] -fn call_with_different_arg_values() { +fn call_with_different_values() { let mut c = Cacher::new(|a| a); let v1 = c.value(1); @@ -798,7 +689,7 @@ to `value` with the `arg` value of 2 returns 2. 
Run this with the `Cacher` implementation from Listing 13-9 and Listing 13-10 and the test will fail on the `assert_eq!` with this message: -```text +``` thread 'call_with_different_arg_values' panicked at 'assertion failed: `(left == right)` (left: `1`, right: `2`)', src/main.rs ``` @@ -812,7 +703,7 @@ of the hash map will be the `arg` values that are passed in, and the values of the hash map will be the result of calling the closure on that key. Instead of looking at whether `self.value` directly has a `Some` or a `None` value, the `value` function will look up the `arg` in the hash map and return the value if -it's present. If it's not present, the `Cacher` will call the closure and save +it’s present. If it’s not present, the `Cacher` will call the closure and save the resulting value in the hash map associated with its `arg` value. Another problem with the current `Cacher` implementation that restricts its use @@ -825,17 +716,17 @@ functionality. ### Closures Can Capture Their Environment In the workout generator example, we only used closures as inline anonymous -functions. Closures have an additional ability we can use that functions don't +functions. Closures have an additional ability we can use that functions don’t have, however: they can capture their environment and access variables from the -scope in which they're defined. +scope in which they’re defined. - Listing 13-12 has an example of a closure stored in the variable `equal_to_x` -that uses the variable `x` from the closure's surrounding environment: +that uses the variable `x` from the closure’s surrounding environment: -We can't do the same with functions; let's see what happens if we try: +We can’t do the same with functions; let’s see what happens if we try: Filename: src/main.rs -```rust,ignore +``` fn main() { let x = 4; @@ -896,7 +787,7 @@ fn main() { We get an error: -```text +``` error[E0434]: can't capture dynamic environment in a fn item; use the || { ... } closure form instead --> @@ -909,8 +800,8 @@ The compiler even reminds us that this only works with closures! When a closure captures a value from its environment, the closure uses memory to store the values for use in the closure body. This use of memory is overhead -that we don't want pay for in the more common case where we want to execute -code that doesn't capture its environment. Because functions are never allowed +that we don’t want to pay for in the more common case where we want to execute +code that doesn’t capture its environment. Because functions are never allowed to capture their environment, defining and using functions will never incur this overhead. @@ -923,22 +814,70 @@ directly map to the three ways a function can take a parameter: taking ownership, borrowing immutably, and borrowing mutably. These ways of capturing values are encoded in the three `Fn` traits as follows: -* `FnOnce` takes ownership of the environment, and therefore cannot be called - more than once in the same context -* `Fn` borrows values from the environment immutably -* `FnMut` can change the environment since it mutably borrows values +* `FnOnce` consumes the variables it captures from its enclosing scope (the + enclosing scope is called the closure's *environment*). In order to consume + the captured variables, the closure must therefore take ownership of these + variables and moves them into the closure when the closure is defined. 
The + `Once` part of the name is because the closure can't take ownership of the + same variables more than once, so it can only be called one time. +* `Fn` borrows values from the environment immutably. +* `FnMut` can change the environment since it mutably borrows values. + +When we create a closure, Rust infers how we want to reference the environment +based on how the closure uses the values from the environment. In Listing +13-12, the `equal_to_x` closure borrows `x` immutably (so `equal_to_x` has the +`Fn` trait) since the body of the closure only needs to read the value in `x`. + +If we want to force the closure to take ownership of the values it uses in the +environment, we can use the `move` keyword before the parameter list. This is +mostly useful when passing a closure to a new thread in order to move the data +to be owned by the new thread. We’ll have more examples of `move` closures in +Chapter 16 when we talk about concurrency, but for now here’s the code from +Listing 13-12 with the `move` keyword added to the closure definition and using +vectors instead of integers, since integers can be copied rather than moved: + +Filename: src/main.rs + +``` +fn main() { + let x = vec![1, 2, 3]; + + let equal_to_x = move |z| z == x; + + println!("can't use x here: {:?}", x); -Creating `FnOnce` closures that capture values from their environment is mostly -used in the context of starting new threads. We'll show some examples and -explain more detail about this feature of closures in Chapter 16 when we talk -about concurrency. + let y = vec![1, 2, 3]; + + assert!(equal_to_x(y)); +} +``` + +This example doesn’t compile: + +``` +error[E0382]: use of moved value: `x` + --> src/main.rs:6:40 + | +4 | let equal_to_x = move |z| z == x; + | -------- value moved (into closure) here +5 | +6 | println!("can't use x here: {:?}", x); + | ^ value used here after move + | + = note: move occurs because `x` has type `std::vec::Vec`, which does not + implement the `Copy` trait +``` + +The `x` value is moved into the closure when the closure is defined because of +the `move` keyword. The closure then has ownership of `x`, and `main` isn’t +allowed to use `x` anymore. Removing the `println!` will fix this example. Most of the time when specifying one of the `Fn` trait bounds, you can start with `Fn` and the compiler will tell you if you need `FnMut` or `FnOnce` based -on what happens when the closure is called. +on what happens in the closure body. To illustrate situations where closures that can capture their environment are -useful as function parameters, let's move on to our next topic: iterators. +useful as function parameters, let’s move on to our next topic: iterators. ## Processing a Series of Items with Iterators @@ -955,28 +894,32 @@ to element? Can we define this at the begin of the iterator section? --> The iterator pattern allows you to perform some task on a sequence of items in turn. An *iterator* is responsible for the logic around iterating over each item in the sequence and determining when the sequence has finished. When we use -iterators, we don't have to reimplement that logic ourselves. +iterators, we don’t have to reimplement that logic ourselves. In Rust, iterators are *lazy*, which means they have no effect until we call methods on them that consume the iterator to use it up. For example, the code in Listing 13-13 creates an iterator over the items in the vector `v1` by -calling the `iter` method defined on `Vec`. 
This code by itself doesn't do
+calling the `iter` method defined on `Vec`. This code by itself doesn’t do
 anything useful:
 
-```rust
+```
 let v1 = vec![1, 2, 3];
 
 let v1_iter = v1.iter();
 ```
 
-Listing 13-13: Creating an iterator; this by itself isn't useful
+Listing 13-13: Creating an iterator
 
 After creating an iterator, we can choose to use it in a variety of ways. In
-Chapter 3, we saw that we can use iterators with `for` loops to execute some
-code on each item. The example in Listing 13-14 uses a `for` loop to print out
-each value in the vector:
+Listing 3-6, we actually used iterators with `for` loops to execute some code
+on each item, though we glossed over what the call to `iter` did until now. The
+example in Listing 13-14 separates the creation of the iterator from the use of
+the iterator in the `for` loop. The iterator is stored in the `v1_iter`
+variable, and no iteration takes place at that time. Once the `for` loop is
+called using the iterator in `v1_iter`, then each element in the iterator is
+used in one iteration of the loop, which prints out each value:
 
-```rust
+```
 let v1 = vec![1, 2, 3];
 
 let v1_iter = v1.iter();
@@ -986,9 +929,9 @@ for val in v1_iter {
 }
 ```
 
-Listing 13-13: Making use of an iterator in a `for` loop
+Listing 13-14: Making use of an iterator in a `for` loop
 
-In languages that don't have iterators provided by their standard libraries, we
+In languages that don’t have iterators provided by their standard libraries, we
 would likely write this same functionality by starting a variable at index 0,
 using that variable to index into the vector to get a value, and incrementing
 the variable value in a loop until its value gets up to the total number of
@@ -996,39 +939,41 @@ items in the vector. Iterators take care of all of that logic for us, which
 cuts down on the repetitive code we would have to write and potentially mess
 up. In addition, the way iterators are implemented gives us more flexibility to
 use the same logic with many different kinds of sequences, not just data
-structures that we can index into like vectors. Let's see how iterators do that.
+structures that we can index into like vectors. Let’s see how iterators do that.
 
 ### The `Iterator` trait and the `next` method
 
 Iterators all implement a trait named `Iterator` that is defined in the
 standard library. The definition of the trait looks like this:
 
-```rust
+```
 trait Iterator {
     type Item;
 
     fn next(&mut self) -> Option<Self::Item>;
+
+    // methods with default implementations elided
 }
 ```
 
-You'll notice some new syntax that we haven't covered yet: `type Item` and
-`Self::Item`, which are defining an *associated type* with this trait. We'll
+You’ll notice some new syntax that we haven’t covered yet: `type Item` and
+`Self::Item`, which are defining an *associated type* with this trait. We’ll
 talk about associated types in depth in Chapter 19, but for now, all you need
 to know is that this code says implementing `Iterator` trait requires that you
 also define an `Item` type, and this `Item` type is used in the return type of
 the `next` method. In other words, the `Item` type will be the type of element
-that's returned from the iterator.
+that’s returned from the iterator.
 
 The `next` method is the only method that the `Iterator` trait requires
-implementors of the trait to define. `next` returns one item of the iterator
+implementers of the trait to define. `next` returns one item of the iterator
 at a time wrapped in `Some`, and when iteration is over, it returns `None`.
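As a rough illustration of this protocol (and not the compiler's exact
desugaring of `for`), the sketch below drives an iterator by hand, calling
`next` until it yields `None`; the `while let` loop and the printed message are
our own additions, not part of the book's listings:

```
fn main() {
    let v1 = vec![1, 2, 3];
    let mut v1_iter = v1.iter();

    // Keep asking the iterator for items; `next` yields `Some(&1)`, `Some(&2)`,
    // `Some(&3)`, and finally `None`, which ends the loop.
    while let Some(val) = v1_iter.next() {
        println!("Got: {}", val);
    }
}
```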
-We can call the `next` method on iterators directly if we'd like; Listing 13-14 -has a test that demonstrates the values we'd get on repeated calls to `next` +We can call the `next` method on iterators directly if we’d like; Listing 13-15 +has a test that demonstrates the values we’d get on repeated calls to `next` on the iterator created from the vector: Filename: src/lib.rs -```rust +``` #[test] fn iterator_demonstration() { let v1 = vec![1, 2, 3]; @@ -1042,12 +987,14 @@ fn iterator_demonstration() { } ``` -Listing 13-14: Calling the `next` method on an iterator +Listing 13-15: Calling the `next` method on an iterator Note that we needed to make `v1_iter` mutable: calling the `next` method on an -iterator changes the iterator's state that keeps track of where it is in the +iterator changes the iterator’s state that keeps track of where it is in the sequence. Put another way, this code *consumes*, or uses up, the iterator. Each -call to `next` eats up an item from the iterator. +call to `next` eats up an item from the iterator. We didn’t need to make +`v1_iter` mutable when we used a `for` loop because the `for` loop took +ownership of `v1_iter` and made `v1_iter` mutable behind the scenes. Also note that the values we get from the calls to `next` are immutable references to the values in the vector. The `iter` method produces an iterator @@ -1067,11 +1014,11 @@ catalyst? --> The `Iterator` trait has a number of different methods with default -implementatitons provided for us by the standard library; you can find out all +implementations provided for us by the standard library; you can find out all about these methods by looking in the standard library API documentation for the `Iterator` trait. Some of these methods call the `next` method in their -definition, which is why we're required to implement the `next` method when -implementing thie `Iterator` trait. +definition, which is why we’re required to implement the `next` method when +implementing the `Iterator` trait. src/main.rs:4:1 @@ -1149,20 +1096,20 @@ nothing unless consumed = note: #[warn(unused_must_use)] on by default ``` -The code in Listing 13-16 isn't actually doing anything; the closure we've +The code in Listing 13-17 isn’t actually doing anything; the closure we’ve specified never gets called. The warning reminds us why: iterator adaptors are lazy, and we probably meant to consume the iterator here. In order to fix this warning and consume the iterator to get a useful result, -we're going to use the `collect` method, which we saw briefly in Chapter 12. +we’re going to use the `collect` method, which we saw briefly in Chapter 12. This method consumes the iterator and collects the resulting values into a -data structure. In Listing 13-17, we're going to collect the results of +data structure. 
In Listing 13-18, we’re going to collect the results of iterating over the iterator returned from the call to `map` into a vector that will contain each item from the original vector incremented by 1: Filename: src/main.rs -```rust +``` let v1: Vec = vec![1, 2, 3]; let v2: Vec<_> = v1.iter().map(|x| x + 1).collect(); @@ -1170,7 +1117,7 @@ let v2: Vec<_> = v1.iter().map(|x| x + 1).collect(); assert_eq!(v2, vec![2, 3, 4]); ``` -Listing 13-17: Calling the `map` method to create a new iterator, then calling +Listing 13-18: Calling the `map` method to create a new iterator, then calling the `collect` method to consume the new iterator and create a vector Because `map` takes a closure, we can specify any operation that we want to @@ -1187,19 +1134,19 @@ which applies the closure? Also, to generalize this discussion a bit, would you ever use iter without map? --> - ### Using Closures that Capture their Environment with Iterators -Now that we've introduced iterators, we can demonstrate a common use of +Now that we’ve introduced iterators, we can demonstrate a common use of closures that capture their environment by using the `filter` iterator adapter. The `filter` method on an iterator takes a closure that takes each item from the iterator and returns a boolean. If the closure returns `true`, the value will be included in the iterator produced by `filter`. If the closure returns -`false`, the value won't be included in the resulting iterator. Listing 13-18 +`false`, the value won’t be included in the resulting iterator. Listing 13-19 demonstrates using `filter` with a closure that captures the `shoe_size` variable from its environment in order to iterate over a collection of `Shoe` struct instances in order to return only shoes that are the specified size: @@ -1239,7 +1186,7 @@ fn filters_by_size() { } ``` -Listing 13-18: Using the `filter` method with a closure that captures +Listing 13-19: Using the `filter` method with a closure that captures `shoe_size` @@ -1249,8 +1196,8 @@ size as parameters. It returns a vector containing only shoes of the specified size. In the body of `shoes_in_my_size`, we call `into_iter` to create an iterator that takes ownership of the vector. Then we call `filter` to adapt that iterator into a new iterator that only contains elements for which the -closure returns `true`. The closure we've specified captures the `shoe_size` -parameter from the environment and uses the value to compare with each shoe's +closure returns `true`. The closure we’ve specified captures the `shoe_size` +parameter from the environment and uses the value to compare with each shoe’s size to only keep shoes that are of the size specified. Finally, calling `collect` gathers the values returned by the adapted iterator into a vector that the function returns. @@ -1268,26 +1215,26 @@ is to you between "a program with an iterator inside" and "whole thing we were making was an iterator"? I don't understand what you mean by these terms so I'm not sure how to clear this up. /Carol --> -We've shown that we can create an iterator by calling `iter`, `into_iter`, or +We’ve shown that we can create an iterator by calling `iter`, `into_iter`, or `iter_mut` on a vector. We can also create iterators from the other collection types in the standard library, such as hash map. Additionally, we can implement the `Iterator` trait in order to create iterators that do anything we want. -As previously mentioned, the only method we're required to provide a definition -for is the `next` method. 
Once we've done that, we can use all the other +As previously mentioned, the only method we’re required to provide a definition +for is the `next` method. Once we’ve done that, we can use all the other methods that have default implementations provided by the `Iterator` trait on our iterator! -The iterator we're going to create is one that will only ever count from 1 to -5. First, we'll create a struct to hold on to some values, and then we'll make -this struct into an iterator by implementing the `Iterator` trait and use the -values in that implementation. +The iterator we’re going to create is one that will only ever count from 1 +to 5. First, we’ll create a struct to hold on to some values, and then we’ll +make this struct into an iterator by implementing the `Iterator` trait and use +the values in that implementation. -Listing 13-19 has the definition of the `Counter` struct and an associated +Listing 13-20 has the definition of the `Counter` struct and an associated `new` function to create instances of `Counter`: Filename: src/lib.rs -```rust +``` struct Counter { count: u32, } @@ -1299,13 +1246,13 @@ impl Counter { } ``` -Listing 13-19: Defining the `Counter` struct and a `new` function that creates +Listing 13-20: Defining the `Counter` struct and a `new` function that creates instances of `Counter` with an initial value of 0 for `count` - + The `Counter` struct has one field named `count`. This field holds a `u32` @@ -1320,17 +1267,13 @@ next line is telling us? --> does?--> -Next, we're going to implement the `Iterator` trait for our `Counter` type by +Next, we’re going to implement the `Iterator` trait for our `Counter` type by defining the body of the `next` method to specify what we want to happen when -this iterator is used, as shown in Listing 13-20: +this iterator is used, as shown in Listing 13-21: Filename: src/lib.rs -```rust -# struct Counter { -# count: u32, -# } -# +``` impl Iterator for Counter { type Item = u32; @@ -1346,46 +1289,28 @@ impl Iterator for Counter { } ``` -Listing 13-20: Implementing the `Iterator` trait on our `Counter` struct +Listing 13-21: Implementing the `Iterator` trait on our `Counter` struct We set the associated `Item` type for our iterator to `u32`, meaning the -iterator will return `u32` values. Again, don't worry about associated types -yet, we'll be covering them in Chapter 19. We want our iterator to add one to +iterator will return `u32` values. Again, don’t worry about associated types +yet, we’ll be covering them in Chapter 19. We want our iterator to add one to the current state, which is why we initialized `count` to 0: we want our iterator to return one first. If the value of `count` is less than six, `next` will return the current value wrapped in `Some`, but if `count` is six or -higher, our iterator will return `None` +higher, our iterator will return `None`. -#### Using Our `Counter` Iterator's `next` Method +#### Using Our `Counter` Iterator’s `next` Method -Once we've implemented the `Iterator` trait, we have an iterator! Listing 13-21 +Once we’ve implemented the `Iterator` trait, we have an iterator! 
Listing 13-22 shows a test demonstrating that we can use the iterator functionality our `Counter` struct now has by calling the `next` method on it directly, just like -we did with the iterator created from a vector in Listing 13-14: +we did with the iterator created from a vector in Listing 13-15: Filename: src/lib.rs -```rust -# struct Counter { -# count: u32, -# } -# -# impl Iterator for Counter { -# type Item = u32; -# -# fn next(&mut self) -> Option { -# self.count += 1; -# -# if self.count < 6 { -# Some(self.count) -# } else { -# None -# } -# } -# } -# +``` #[test] fn calling_next_directly() { let mut counter = Counter::new(); @@ -1399,7 +1324,7 @@ fn calling_next_directly() { } ``` -Listing 13-21: Testing the functionality of the `next` method implementation +Listing 13-22: Testing the functionality of the `next` method implementation This test creates a new `Counter` instance in the `counter` variable and then calls `next` repeatedly, verifying that we have implemented the behavior we @@ -1416,8 +1341,8 @@ I added some summary text about what the test is doing. Is this clearer? /Carol #### Using Other `Iterator` Trait Methods on Our Iterator Because we implemented the `Iterator` trait by defining the `next` method, we -can now use any `Iterator` trait method's default implementations that the -standard library has defined, since they all use the `next` method's +can now use any `Iterator` trait method’s default implementations that the +standard library has defined, since they all use the `next` method’s functionality. + At the time, we said not to worry about the inefficient `clone` calls here @@ -1540,8 +1440,8 @@ need to clone the values that we put in the `query` and `filename` fields of With our new knowledge about iterators, we can change the `new` function to take ownership of an iterator as its argument instead of borrowing a slice. -We'll use the interator functionality instead of the code we had that checks -the length of the slice and indexes into specific locations. This will clear up +We’ll use the iterator functionality instead of the code we had that checks the +length of the slice and indexes into specific locations. This will clear up what the `Config::new` function is doing since the iterator will take care of accessing the values. @@ -1559,50 +1459,41 @@ operations that borrow, we can move the `String` values from the iterator into #### Using the Iterator Returned by `env::args` Directly -In your I/O project's *src/main.rs*, let's change the start of the `main` -function from this code that we had in Listing 12-23: +In your I/O project’s *src/main.rs*, let’s change the start of the `main` +function from this code that we had at the end of Chapter 12: -```rust,ignore +``` fn main() { let args: Vec = env::args().collect(); - let mut stderr = std::io::stderr(); let config = Config::new(&args).unwrap_or_else(|err| { - writeln!( - &mut stderr, - "Problem parsing arguments: {}", - err - ).expect("Could not write to stderr"); + eprintln!("Problem parsing arguments: {}", err); process::exit(1); }); + // ...snip... } ``` -To the code in Listing 13-24: +To the code in Listing 13-25: Filename: src/main.rs -```rust,ignore +``` fn main() { - let mut stderr = std::io::stderr(); - let config = Config::new(env::args()).unwrap_or_else(|err| { - writeln!( - &mut stderr, - "Problem parsing arguments: {}", - err - ).expect("Could not write to stderr"); + eprintln!("Problem parsing arguments: {}", err); process::exit(1); }); + // ...snip... 
} ``` -Listing 13-24: Passing the return value of `env::args` to `Config::new` +Listing 13-25: Passing the return value of `env::args` to `Config::new` The `env::args` function returns an iterator! Rather than collecting the iterator values into a vector and then passing a slice to `Config::new`, now -we're passing ownership of the iterator returned from `env::args` to +we’re passing ownership of the iterator returned from `env::args` to `Config::new` directly. -Next, we need to update the definition of `Config::new`. In your I/O project's -*src/lib.rs*, let's change the signature of `Config::new` to look like Listing -13-25: +Next, we need to update the definition of `Config::new`. In your I/O project’s +*src/lib.rs*, let’s change the signature of `Config::new` to look like Listing +13-26: @@ -1625,33 +1516,29 @@ Filename: src/lib.rs ``` impl Config { - fn new(args: std::env::Args) -> Result { + pub fn new(args: std::env::Args) -> Result { // ...snip... ``` -Listing 13-25: Updating the signature of `Config::new` to expect an iterator +Listing 13-26: Updating the signature of `Config::new` to expect an iterator The standard library documentation for the `env::args` function shows that the -type of the iterator it returns is `std::env::Args`. We've updated the +type of the iterator it returns is `std::env::Args`. We’ve updated the signature of the `Config::new` function so that the parameter `args` has the type `std::env::Args` instead of `&[String]`. #### Using `Iterator` Trait Methods Instead of Indexing -Next, we'll fix the body of `Config::new`. The standard library documentation +Next, we’ll fix the body of `Config::new`. The standard library documentation also mentions that `std::env::Args` implements the `Iterator` trait, so we know -we can call the `next` method on it! Listing 13-26 has the new code: +we can call the `next` method on it! Listing 13-27 has updated the code +from Listing 12-23 to use the `next` method: Filename: src/lib.rs -```rust -# struct Config { -# query: String, -# filename: String, -# } -# +``` impl Config { - fn new(mut args: std::env::Args) -> Result { + pub fn new(mut args: std::env::Args) -> Result { args.next(); let query = match args.next() { @@ -1664,14 +1551,16 @@ impl Config { None => return Err("Didn't get a file name"), }; + let case_sensitive = env::var("CASE_INSENSITIVE").is_err(); + Ok(Config { - query, filename + query, filename, case_sensitive }) } } ``` -Listing 13-26: Changing the body of `Config::new` to use iterator methods +Listing 13-27: Changing the body of `Config::new` to use iterator methods ### Making Code Clearer with Iterator Adaptors The other place in our I/O project we could take advantage of iterators is in -the `search` function, as implemented in Listing 12-19 and reproduced here in -Listing 13-27: +the `search` function, reproduced here in Listing 13-28 as it was at the end of +Chapter 12: Filename: src/lib.rs -```rust,ignore -fn search<'a>(query: &str, contents: &'a str) -> Vec<&'a str> { +``` +pub fn search<'a>(query: &str, contents: &'a str) -> Vec<&'a str> { let mut results = Vec::new(); for line in contents.lines() { @@ -1715,37 +1604,39 @@ fn search<'a>(query: &str, contents: &'a str) -> Vec<&'a str> { } ``` -Listing 13-27: The implementation of the `search` function from Listing 12-19 +Listing 13-28: The implementation of the `search` function from Chapter 12 We can write this code in a much shorter way by using iterator adaptor methods -instead. 
This also lets us avoid having to have a mutable intermediate -`results` vector. The functional programming style prefers to minimize the -amount of mutable state to make code clearer. Removing the mutable state might -make it easier for us to make a future enhancement to make searching happen in -parallel, since we wouldn't have to manage concurrent access to the `results` -vector. Listing 13-28 shows this change: +instead. This also lets us avoid having a mutable intermediate `results` +vector. The functional programming style prefers to minimize the amount of +mutable state to make code clearer. Removing the mutable state might make it +easier for us to make a future enhancement to make searching happen in +parallel, since we wouldn’t have to manage concurrent access to the `results` +vector. Listing 13-29 shows this change: Filename: src/lib.rs -```rust,ignore -fn search<'a>(query: &str, contents: &'a str) -> Vec<&'a str> { +``` +pub fn search<'a>(query: &str, contents: &'a str) -> Vec<&'a str> { contents.lines() .filter(|line| line.contains(query)) .collect() } ``` -Listing 13-28: Using iterator adaptor methods in the implementation of the +Listing 13-29: Using iterator adaptor methods in the implementation of the `search` function Recall that the purpose of the `search` function is to return all lines in `contents` that contain the `query`. Similarly to the `filter` example in -Listing 13-18, we can use the `filter` adaptor to keep only the lines that +Listing 13-19, we can use the `filter` adaptor to keep only the lines that `line.contains(query)` returns true for. We then collect the matching lines up -into another vector with `collect`. Much simpler! +into another vector with `collect`. Much simpler! Feel free to make the same +change to use iterator methods in the `search_case_insensitive` function as +well. The next logical question is which style you should choose in your own code: -the original implementation in Listing 13-27, or the version using iterators in -Listing 13-28. Most Rust programmers prefer to use the iterator style. It's a +the original implementation in Listing 13-28, or the version using iterators in +Listing 13-29. Most Rust programmers prefer to use the iterator style. It’s a bit tougher to get the hang of at first, but once you get a feel for the various iterator adaptors and what they do, iterators can be easier to understand. Instead of fiddling with the various bits of looping and building new vectors, the code focuses on the high-level objective of the loop. This -abstracts away some of the commonplace code so that it's easier to see the +abstracts away some of the commonplace code so that it’s easier to see the concepts that are unique to this code, like the filtering condition each element in the iterator must pass. But are the two implementations truly equivalent? The intuitive assumption -might be that the more low-level loop will be faster. Let's talk about +might be that the more low-level loop will be faster. Let’s talk about performance. ## Comparing Performance: Loops versus Iterators @@ -1773,24 +1664,24 @@ To determine which to use, we need to know which version of our `search` functions is faster: the version with an explicit `for` loop or the version with iterators. -We ran a benchmark by loading the entire contents of "The Adventures of -Sherlock Holmes" by Sir Arthur Conan Doyle into a `String` and looking for the -word "the" in the contents. 
Here were the results of the benchmark on the +We ran a benchmark by loading the entire contents of “The Adventures of +Sherlock Holmes” by Sir Arthur Conan Doyle into a `String` and looking for the +word “the” in the contents. Here were the results of the benchmark on the version of `search` using the `for` loop and the version using iterators: -```text +``` test bench_search_for ... bench: 19,620,300 ns/iter (+/- 915,700) test bench_search_iter ... bench: 19,234,900 ns/iter (+/- 657,200) ``` -The iterator version ended up slightly faster! We're not going to go through -the benchmark code here, as the point is not to prove that they're exactly +The iterator version ended up slightly faster! We’re not going to go through +the benchmark code here, as the point is not to prove that they’re exactly equivalent, but to get a general sense of how these two implementations compare -perfromance-wise. For a more comprehensive benchmark, you'd want to check +performance-wise. For a more comprehensive benchmark, you’d want to check various texts of various sizes, different words, words of different lengths, and all kinds of other variations. The point is this: iterators, while a -high-level abstraction, get compiled down to roughly the same code as if you'd -written the lower-level code yourself. Iterators are one of Rust's *zero-cost +high-level abstraction, get compiled down to roughly the same code as if you’d +written the lower-level code yourself. Iterators are one of Rust’s *zero-cost abstractions*, by which we mean using the abstraction imposes no additional runtime overhead in the same way that Bjarne Stroustrup, the original designer and implementer of C++, defines *zero-overhead*: @@ -1799,7 +1690,7 @@ and implementer of C++, defines *zero-overhead*: > don’t use, you don’t pay for. And further: What you do use, you couldn’t hand > code any better. > -> - Bjarne Stroustrup "Foundations of C++" +> - Bjarne Stroustrup “Foundations of C++” This code uses an iterator chain to do some math on three variables in scope: a `buffer` slice of data, an array of 12 `coefficients`, and an amount by which -to shift data in `qlp_shift`. We've declared the variables within this example -but not given them any values; while this code doesn't have much meaning -outside of its context, it's still a concise, real-world example of how Rust +to shift data in `qlp_shift`. We’ve declared the variables within this example +but not given them any values; while this code doesn’t have much meaning +outside of its context, it’s still a concise, real-world example of how Rust translates high-level ideas to low-level code: -```rust,ignore +``` let buffer: &mut [i32]; let coefficients: [i64; 12]; let qlp_shift: i16; @@ -1845,11 +1736,11 @@ pair, we multiply the values together, sum all the results, and shift the bits in the sum `qlp_shift` bits to the right. Calculations in applications like audio decoders often prioritize performance -most highly. Here, we're creating an iterator, using two adaptors, then +most highly. Here, we’re creating an iterator, using two adaptors, then consuming the value. What assembly code would this Rust code compile to? Well, -as of this writing, it compiles down to the same assembly you'd write by hand. -There's no loop at all corresponding to the iteration over the values in -`coefficients`: Rust knows that there are twelve iterations, so it "unrolls" +as of this writing, it compiles down to the same assembly you’d write by hand. 
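To make that claim easy to poke at locally, here is a self-contained variant of the same adaptor chain with concrete values filled in. The values are invented purely for illustration, since the original deliberately leaves `buffer`, `coefficients`, and `qlp_shift` unassigned; this is a hedged sketch, not one of the chapter’s own listings:

```
fn main() {
    // Hypothetical stand-ins for the decoder state; the chapter's example
    // declares these variables without giving them values.
    let mut buffer: Vec<i32> = (1..25).collect();
    let coefficients: [i64; 12] = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12];
    let qlp_shift: i16 = 8;

    for i in 12..buffer.len() {
        // `zip` pairs each coefficient with one of the previous 12 samples,
        // `map` multiplies each pair, and `sum` consumes the iterator.
        let prediction = coefficients.iter()
                                     .zip(&buffer[i - 12..i])
                                     .map(|(&c, &s)| c * s as i64)
                                     .sum::<i64>() >> qlp_shift;
        let delta = buffer[i];
        buffer[i] = prediction as i32 + delta;
    }

    println!("{:?}", buffer);
}
```

Running it just prints the transformed buffer; the point is only that the chain of `zip`, `map`, and `sum` is ordinary, executable Rust, not that these particular numbers mean anything.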
+There’s no loop at all corresponding to the iteration over the values in +`coefficients`: Rust knows that there are twelve iterations, so it “unrolls” the loop. Unrolling is an optimization that removes the overhead of the loop controlling code and instead generates repetitive code for each iteration of the loop. @@ -1857,21 +1748,21 @@ the loop. -All of the coefficients get stored in registers, which means it's very fast to +All of the coefficients get stored in registers, which means it’s very fast to access the values. There are no bounds checks on the array access at runtime. All these optimizations Rust is able to apply make the resulting code extremely efficient. Now that you know this, go use iterators and closures without fear! They make -code feel higher-level, but don't impose a runtime performance penalty for +code feel higher-level, but don’t impose a runtime performance penalty for doing so. ## Summary Closures and iterators are Rust features inspired by functional programming -language ideas. They contribute to Rust's ability to clearly express high-level +language ideas. They contribute to Rust’s ability to clearly express high-level ideas, at low level performance. The implementations of closures and iterators -are such that runtime performance is not affected. This is part of Rust's goal +are such that runtime performance is not affected. This is part of Rust’s goal to strive to provide zero-cost abstractions. -Now that we've improved the expressiveness of our I/O project, let's look at +Now that we’ve improved the expressiveness of our I/O project, let’s look at some more features of `cargo` that would help us get ready to share the project with the world. diff --git a/src/doc/book/second-edition/nostarch/chapter14.md b/src/doc/book/second-edition/nostarch/chapter14.md index 2ce61b041a..dfdc027ab4 100644 --- a/src/doc/book/second-edition/nostarch/chapter14.md +++ b/src/doc/book/second-edition/nostarch/chapter14.md @@ -3,47 +3,93 @@ # More about Cargo and Crates.io -We've used some features of Cargo in this book so far, but only the most basic -ones. We've used Cargo to build, run, and test our code, but it can do a lot -more. Let's go over some of its other features now. Cargo can do even more than -what we will cover in this chapter; for a full explanation, see its -documentation. +So far we’ve used only the most basic features of Cargo to build, run, and test +our code, but it can do a lot more. Here we’ll go over some of its other, more +advanced features to show you how to: -We're going to cover: +* Customize your build through release profiles +* Publish libraries on crates.io +* Organize larger projects with workspaces +* Install binaries from crates.io +* Extend Cargo with your own custom commands -* Customizing your build through release profiles -* Publishing libraries on crates.io -* Organizing larger projects with workspaces -* Installing binaries from crates.io -* Extending Cargo with your own custom commands +Cargo can do even more than what we can cover in this chapter too, so for a +full explanation, see its documentation at *http://doc.rust-lang.org/cargo/*. -## Release profiles + + -Cargo supports a notion of *release profiles*. These profiles control various -options for compiling your code and let you configure each profile -independently of the others. 
You've seen a hint of this feature in the output -of your builds: +## Customizing Builds with Release Profiles -```text +In Rust *release profiles* are pre-defined, and customizable, profiles with +different configurations, to allow the programmer more control over various +options for compiling your code. Each profile is configured independently of +the others. + + + + +Cargo has four profiles defined with good default configurations for each use +case. Cargo uses the different profiles based on which command you’re running. +The commands correspond to the profiles as shown in Table 14-1: + + + + +| Command | Profile | +|-------------------------|-----------| +| `cargo build` | `dev` | +| `cargo build --release` | `release` | +| `cargo test` | `test` | +| `cargo doc` | `doc` | + +Table 14-1: Which profile is used when you run different Cargo commands + +This may be familiar from the output of your builds, which shows the profile +used in the build: + + + + +``` $ cargo build - Finished debug [unoptimized + debuginfo] target(s) in 0.0 secs + Finished dev [unoptimized + debuginfo] target(s) in 0.0 secs $ cargo build --release Finished release [optimized] target(s) in 0.0 secs ``` -The "debug" and "release" notifications here indicate that the compiler is -using different profiles. Cargo supports four profiles: +The “dev” and “release” notifications here indicate that the compiler is +using different profiles. + + + -* `dev`: used for `cargo build` -* `release` used for `cargo build --release` -* `test` used for `cargo test` -* `doc` used for `cargo doc` +### Customizing Release Profiles -We can customize our `Cargo.toml` file with `[profile.*]` sections to tweak -various compiler options for these profiles. For example, here's one of the -default options for the `dev` and `release` profiles: + + -```toml +Cargo has default settings for each of the profiles that apply when there +aren’t any `[profile.*]` sections in the project’s *Cargo.toml* file. By adding +`[profile.*]` sections for any profile we want to customize, we can choose to +override any subset of the default settings. For example, here are the default +values for the `opt-level` setting for the `dev` and `release` profiles: + +``` [profile.dev] opt-level = 0 @@ -52,58 +98,78 @@ opt-level = 3 ``` The `opt-level` setting controls how many optimizations Rust will apply to your -code. The setting goes from zero to three. Applying more optimizations takes -more time. When you're compiling very often in development, you'd usually want -compiling to be fast at the expense of the resulting code running slower. When -you're ready to release, it's better to spend more time compiling the one time -that you build your code to trade off for code that will run faster every time -you use that compiled code. - -We could override these defaults by changing them in `Cargo.toml`. For example, -if we wanted to use optimization level 1 in development: +code, with a range of zero to three. Applying more optimizations makes +compilation take longer, so if you’re in development and compiling very often, +you’d want compiling to be fast at the expense of the resulting code running +slower. That’s why the default `opt-level` for `dev` is `0`. When you’re ready +to release, it’s better to spend more time compiling. You’ll only be compiling +in release mode once, and running the compiled program many times, so release +mode trades longer compile time for code that runs faster. That’s why the +default `opt-level` for the `release` profile is `3`. 
+ +We can choose to override any default setting by adding a different value for +them in *Cargo.toml*. If we wanted to use optimization level 1 in the +development profile, for example, we can add these two lines to our project’s +*Cargo.toml*: + + + + +Filename: Cargo.toml -```toml +``` [profile.dev] opt-level = 1 ``` -This overrides the default setting of `0`, and now our development builds will -use more optimizations. Not as much as a release build, but a little bit more. +This overrides the default setting of `0`. Now when we run `cargo build`, Cargo +will use the defaults for the `dev` profile plus our customization to +`opt-level`. Because we set `opt-level` to `1`, Cargo will apply more +optimizations than the default, but not as many as a release build. -For the full list of settings and the defaults for each profile, see Cargo's -documentation. at *http://doc.crates.io/* +For the full list of configuration options and defaults for each profile, see +Cargo’s documentation at *http://doc.rust-lang.org/cargo/*. ## Publishing a Crate to Crates.io -We've added crates from crates.io as dependencies of our project. We can choose -to share our code for other people to use as well. Crates.io distributes the -source code of your packages, so it is primarily used to distribute code that's -open source. +We’ve used packages from crates.io as dependencies of our project, but you can +also share your code for other people to use by publishing your own packages. +Crates.io distributes the source code of your packages, so it primarily hosts +code that’s open source. -Rust and Cargo have some features that can make your published package easier -for people to find and use. We'll talk about some of those features, then cover -how to publish a package. +Rust and Cargo have features that help make your published package easier for +people to find and use. We’ll talk about some of those features, then cover how +to publish a package. -### Documentation Comments +### Making Useful Documentation Comments -In Chapter 3, we saw comments in Rust that start with `//`. Rust also has a -second kind of comment: the *documentation comment*. While comments can be -useful if someone is reading your code, you can generate HTML documentation -that displays the contents of documentation comments for public API items meant -for someone who's interested in knowing how to *use* your crate, as opposed to -how your crate is *implemented*. Note that documentation is only generated for -library crates, since binary crates don't have a public API that people need to -know how to use. +Accurately documenting your packages will help other users know how and when to +use them, so it’s worth spending some time to write documentation. In Chapter +3, we discussed how to comment Rust code with `//`. Rust also has particular +kind of comment for documentation, known conveniently as *documentation +comments*, that will generate HTML documentation. The HTML displays the +contents of documentation comments for public API items, intended for +programmers interested in knowing how to *use* your crate, as opposed to how +your crate is *implemented*. -Documentation comments use `///` instead of `//` and support Markdown notation -inside. They go just before the item they are documenting. Here's documentation -comments for an `add_one` function: + + -
+Documentation comments use `///` instead of `//` and support Markdown notation +for formatting the text if you’d like. You place documentation comments just +before the item they are documenting. Listing 14-2 shows documentation comments +for an `add_one` function in a crate named `my_crate`: -Filename: src/lib.rs +Filename: src/lib.rs -````rust +``` /// Adds one to the number given. /// /// # Examples @@ -111,85 +177,185 @@ comments for an `add_one` function: /// ``` /// let five = 5; /// -/// assert_eq!(6, add_one(5)); -/// # fn add_one(x: i32) -> i32 { -/// # x + 1 -/// # } +/// assert_eq!(6, my_crate::add_one(5)); /// ``` pub fn add_one(x: i32) -> i32 { x + 1 } -```` +``` + +Listing 14-2: A documentation comment for a function -
+ + -Listing 14-1: A documentation comment for a function +Here, we give a description of what the `add_one` function does, then start a +section with the heading “Examples”, and code that demonstrates how to use the +`add_one` function. We can generate the HTML documentation from this +documentation comment by running `cargo doc`. This command runs the `rustdoc` +tool distributed with Rust and puts the generated HTML documentation in the +*target/doc* directory. -
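Documentation comments are not limited to an Examples section; the chapter lists other commonly used headings, such as Panics and Errors, a little further on. As a hedged sketch that is not part of the chapter’s own listings, a fallible variant of `add_one` (the name `add_one_checked` is invented here) might document a Panics section like this:

```
/// Adds one to the number given.
///
/// # Panics
///
/// Panics if the result would overflow an `i32`.
pub fn add_one_checked(x: i32) -> i32 {
    // `checked_add` returns `None` on overflow, and `expect` turns that
    // case into a panic with the given message.
    x.checked_add(1).expect("overflow when adding one")
}
```

Running `cargo doc` on a crate containing this would render the Panics heading the same way the Examples heading is rendered for `add_one`.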
+For convenience, running `cargo doc --open` will build the HTML for your +current crate’s documentation (as well as the documentation for all of your +crate’s dependencies) and open the result in a web browser. Navigate to the +`add_one` function and you’ll see how the text in the documentation comments +gets rendered, shown here in Figure 14-3: -`cargo doc` runs a tool distributed with Rust, `rustdoc`, to generate HTML -documentation from these comments. To try this out locally, you can run `cargo -doc --open`, which will build the documentation for your current crate (as well -as all of your crate's dependencies) and open it in a web browser. Navigate to -the `add_one` function and you'll see how the text in the documentation -comments gets rendered. +Rendered HTML documentation for the `add_one` function of `my_crate` + +Figure 14-3: HTML documentation for the `add_one` function + + + + +#### Commonly Used Sections + +We used the `# Examples` markdown heading in Listing 14-2 to create a section +in the HTML with the title “Examples”. Some other sections that crate authors +commonly use in their documentation include: + +- Panics: The scenarios in which this function could `panic!`. Callers of this + function who don’t want their programs to panic should make sure that they + don’t call this function in these situations. +- Errors: If this function returns a `Result`, describing the kinds of errors + that might occur and what conditions might cause those errors to be returned + can be helpful to callers so that they can write code to handle the different + kinds of errors in different ways. +- Safety: If this function uses `unsafe` code (which we will discuss in Chapter + 19), there should be a section covering the invariants that this function + expects callers to uphold in order for the code in `unsafe` blocks to + function correctly. + +Most documentation comment sections don’t need all of these sections, but this +is a good list to check to remind you of the kinds of things that people +calling your code will be interested in knowing about. + +#### Documentation Comments as Tests Adding examples in code blocks in your documentation comments is a way to -clearly demonstrate how to use your library. There's an additional bonus reason -to do this: `cargo test` will run the code examples in your documentation as -tests! Nothing is better than documentation with examples. Nothing is worse -than examples that don't actually work because the code has changed since the +clearly demonstrate how to use your library, but it has an additional bonus: +running `cargo test` will run the code examples in your documentation as tests! +Nothing is better than documentation with examples. Nothing is worse than +examples that don’t actually work because the code has changed since the documentation has been written. Try running `cargo test` with the documentation -for the `add_one` function in Listing 14-1; you'll see a section in the test -results like this: +for the `add_one` function like in Listing 14-2; you should see a section in +the test results like this: -```test - Doc-tests add-one +``` + Doc-tests my_crate running 1 test -test add_one_0 ... ok +test src/lib.rs - add_one (line 5) ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured ``` -Try changing the function or the example to see that `cargo test` will catch -that the example no longer works! +Now try changing either the function or the example so that the `assert_eq!` in +the example will panic. 
Run `cargo test` again, and you’ll see that the doc +tests catch that the example and the code are out of sync from one another! -There's another style of doc comment, `//!`, to comment containing items (e.g. -crates, modules or functions), instead of the items following it. These are -typically used inside the crate root (lib.rs) or a module's root (mod.rs) to -document the crate or the module as a whole, respectively. Here's the -documentation within the `libstd` module that contains the entire standard -library: +#### Commenting Contained Items + + + + +There’s another style of doc comment, `//!`, that adds documentation to the +item that contains the comments, rather than adding documentation to the items +following the comments. These are typically used inside the crate root file +(*src/lib.rs*) or inside a module’s root (*mod.rs*) to document the crate or +the module as a whole. + +For example, if we wanted to add documentation that described the purpose of +the `my_crate` crate that contains the `add_one` function, we can add +documentation comments that start with `//!` to the beginning of *src/lib.rs* +as shown in Listing 14-4: + +Filename: src/lib.rs ``` -//! # The Rust Standard Library +//! # My Crate //! -//! The Rust Standard Library provides the essential runtime -//! functionality for building portable Rust software. +//! `my_crate` is a collection of utilities to make performing certain +//! calculations more convenient. + +/// Adds one to the number given. +// ...snip... ``` +Listing 14-4: Documentation for the `my_crate` crate as a whole + +Notice there isn’t any code after the last line that begins with `//!`. Because +we started the comments with `//!` instead of `///`, we’re documenting the item +that contains this comment rather than an item that follows this comment. In +this case, the item that contains this comment is the *src/lib.rs* file, which +is the crate root. These comments describe the entire crate. + +If we run `cargo doc --open`, we’ll see these comments displayed on the front +page of the documentation for `my_crate` above the list of public items in the +crate, as shown in Figure 14-5: + +Rendered HTML documentation with a comment for the crate as a whole + +Figure 14-5: Rendered documentation for `my_crate` including the comment +describing the crate as a whole + + + + +Documentation comments within items are useful for describing crates and +modules especially. Use them to talk about the purpose of the container overall +to help users of your crate understand your organization. + ### Exporting a Convenient Public API with `pub use` In Chapter 7, we covered how to organize our code into modules with the `mod` keyword, how to make items public with the `pub` keyword, and how to bring -items into a scope with the `use` keyword. When publishing a crate for people -unfamiliar with the implementation to use, it's worth taking time to consider -if the structure of your crate that's useful for you as you're developing is -what would be useful for people depending on your crate. If the structure isn't -convenient to use from another library, you don't have to rearrange your -internal organization: you can choose to re-export items to make a different -public structure with `pub use`. - -For example, say that we made a library named `art` consisting of a `kinds` -module containing an enum named `Color` and a `utils` module containing a -function named `mix` as shown in Listing 14-2: - -
-Filename: src/lib.rs - -```rust +items into a scope with the `use` keyword. The structure that makes sense to +you while you’re developing a crate may not be very convenient for your users, +however. You may wish to organize your structs in a hierarchy containing +multiple levels, but people that want to use a type you’ve defined deep in the +hierarchy might have trouble finding out that those types exist. They might +also be annoyed at having to type `use +my_crate::some_module::another_module::UsefulType;` rather than `use +my_crate::UsefulType;`. + + + + +The structure of your public API is a major consideration when publishing a +crate. People who use your crate are less familiar with the structure than you +are, and might have trouble finding the pieces they want to use if the module +hierarchy is large. + +The good news is that, if the structure *isn’t* convenient for others to use +from another library, you don’t have to rearrange your internal organization: +you can choose to re-export items to make a public structure that’s different +to your private structure, using `pub use`. Re-exporting takes a public item in +one location and makes it public in another location as if it was defined in +the other location instead. + + + + +For example, say we made a library named `art` for modeling artistic concepts. +Within this library is a `kinds` module containing two enums named +`PrimaryColor` and `SecondaryColor` and a `utils` module containing a function +named `mix` as shown in Listing 14-6: + +Filename: src/lib.rs + +``` //! # Art //! //! A library for modeling artistic concepts. @@ -221,21 +387,29 @@ pub mod utils { } ``` -
- -Listing 14-2: An `art` library with items organized into `kinds` and `utils` +Listing 14-6: An `art` library with items organized into `kinds` and `utils` modules -
+The front page of the documentation for this crate generated by `cargo doc` +would look like Figure 14-7: + +Rendered documentation for the `art` crate that lists the `kinds` and `utils` modules + +Figure 14-7: Front page of the documentation for `art` +that lists the `kinds` and `utils` modules -In order to use this library, another crate would have `use` statements as in -Listing 14-3: +Note that the `PrimaryColor` and `SecondaryColor` types aren’t listed on the +front page, nor is the `mix` function. We have to click on `kinds` and `utils` +in order to see them. -
-Filename: src/main.rs +Another crate depending on this library would need `use` statements that import +the items from `art` including specifying the module structure that’s currently +defined. Listing 14-8 shows an example of a crate that uses the `PrimaryColor` +and `mix` items from the `art` crate: -```rust,ignore +Filename: src/main.rs + +``` extern crate art; use art::kinds::PrimaryColor; @@ -248,26 +422,32 @@ fn main() { } ``` -
- -Listing 14-3: A program using the `art` crate's items with its internal -structure exported +Listing 14-8: A crate using the `art` crate’s items with its internal structure +exported -
+ + -Users of this crate shouldn't need to know that `PrimaryColor` and -`SecondaryColor` are in the `kinds` module, and `mix` is in the `utils` module; -that structure might be useful for internal organization but doesn't have much -meaning from the outside looking in. +The author of the code in Listing 14-8 that uses the `art` crate had to figure +out that `PrimaryColor` is in the `kinds` module and `mix` is in the `utils` +module. The module structure of the `art` crate is more relevant to developers +working on the `art` crate than developers using the `art` crate. The internal +structure that organizes parts of the crate into the `kinds` module and the +`utils` module doesn’t add any useful information to someone trying to +understand how to use the `art` crate. The `art` crate’s module structure adds +confusion in having to figure out where to look and inconvenience in having to +specify the module names in the `use` statements. -To change this, we can add the following `pub use` statements to the code from -Listing 14-2 to re-export the types at the top level, as shown in Listing 14-4: +To remove the internal organization from the public API, we can take the `art` +crate code from Listing 14-6 and add `pub use` statements to re-export the +items at the top level, as shown in Listing 14-9: -
-Filename: src/lib.rs +Filename: src/lib.rs -```rust +``` //! # Art //! //! A library for modeling artistic concepts. @@ -278,26 +458,33 @@ pub use utils::mix; pub mod kinds { // ...snip... +} + +pub mod utils { + // ...snip... +} ``` -
+Listing 14-9: Adding `pub use` statements to re-export items -Listing 14-4: Adding `pub use` statements to re-export items + -
+The API documentation generated with `cargo doc` for this crate will now list +and link re-exports on the front page as shown in Figure 14-10, which makes +these types easier to find. - +Rendered documentation for the `art` crate with the re-exports on the front page + +Figure 14-10: Front page of the documentation for `art` that lists the +re-exports -Re-exports are listed and linked on the front page of the crate's API -documentation. Users of the `art` crate can still see and choose to use the -internal structure as in Listing 14-3, or they can use the more convenient -structure from Listing 14-4, as shown in Listing 14-5: +Users of the `art` crate can still see and choose to use the internal structure +as in Listing 14-8, or they can use the more convenient structure from Listing +14-9, as shown in Listing 14-11: -
-Filename: src/main.rs +Filename: src/main.rs -```rust,ignore +``` extern crate art; use art::PrimaryColor; @@ -308,84 +495,112 @@ fn main() { } ``` -
- -Listing 14-5: Using the re-exported items from the `art` crate - -
+Listing 14-11: A program using the re-exported items from the `art` crate -Creating a useful public API structure is more of an art than a science. -Choosing `pub use` gives you flexibility in how you expose your crate's -internal structure to users. Take a look at some of the code of crates you've -installed to see if their internal structure differs from their public API. - -### Before Your First Publish - -Before being able to publish any crates, you'll need to create an account on -crates.io at *https://crates.io* and get an API token. To do so, visit the home page at *https://crates.io* -and log in via a GitHub account. A GitHub account is a requirement for now, but -the site might support other ways of creating an account in the future. Once -you're logged in, visit your Account Settings at *https://crates.io/me* page and run the `cargo login` -command with the API key as the page specifies, which will look something like -this: - +In cases where there are many nested modules, re-exporting the types at the top +level with `pub use` can make a big difference in the experience of people who +use the crate. + +Creating a useful public API structure is more of an art than a science, and +you can iterate to find the API that works best for your users. Choosing `pub +use` gives you flexibility in how you structure your crate internally, and +decouples that internal structure with what you present to your users. Take a +look at some of the code of crates you’ve installed to see if their internal +structure differs from their public API. + +### Setting up a Crates.io Account + +Before you can publish any crates, you need to create an account on crates.io +and get an API token. To do so, visit the home page at *https://crates.io* and +log in via a GitHub account---the GitHub account is a requirement for now, but +the site may support other ways of creating an account in the future. Once +you’re logged in, visit your account settings at *https://crates.io/me* and +retrieve your API key. Then run the `cargo login` command with your API key, +like this: -```text +``` $ cargo login abcdefghijklmnopqrstuvwxyz012345 ``` This command will inform Cargo of your API token and store it locally in -*~/.cargo/config*. Note that this token is a **secret** and should not be -shared with anyone else. If it gets shared with anyone for any reason, you -should regenerate it immediately. +*~/.cargo/credentials*. Note that this token is a **secret** and should not be +shared with anyone else. If it is shared with anyone for any reason, you should +revoke it and generate a new token on Crates.io. ### Before Publishing a New Crate -First, your crate will need a unique name. While you're working on a crate -locally, you may name a crate whatever you'd like, but crate names on -crates.io at *https://crates.io* are allocated on a first-come-first- serve basis. Once a crate name -is taken, it cannot be used for another crate, so check on the site that the -name you'd like is available. +Now you have an account, and let’s say you already have a crate you want to +publish. Before publishing, you’ll need to add some metadata to your crate by +adding it to the `[package]` section of the crate’s *Cargo.toml*. + + + + +Your crate will first need a unique name. While you’re working on a crate +locally, you may name a crate whatever you’d like. However, crate names on +Crates.io are allocated on a first-come-first-serve basis. Once a crate name is +taken, no one else may publish a crate with that name. 
Search for the name +you’d like to use on the site to find out if it has been taken. If it hasn’t, +edit the name in *Cargo.toml* under `[package]` to have the name you want to +use for publishing like so: + +``` +[package] +name = "guessing_game" +``` -If you try to publish a crate as generated by `cargo new`, you'll get a warning -and then an error: +Even if you’ve chosen a unique name, if you try to run `cargo publish` to +publish the crate at this point, you’ll get a warning and then an error: -```text +``` $ cargo publish Updating registry `https://github.com/rust-lang/crates.io-index` warning: manifest has no description, license, license-file, documentation, homepage or repository. ...snip... error: api errors: missing or empty metadata fields: description, license. -Please see http://doc.crates.io/manifest.html#package-metadata for how to -upload metadata ``` -We can include more information about our package in *Cargo.toml*. Some of -these fields are optional, but a description and a license are required in -order to publish so that people will know what your crate does and under what -terms they may use it. +This is because we’re missing some crucial information: a description and +license are required so that people will know what your crate does and under +what terms they may use it. To rectify this error, we need to include this +information in *Cargo.toml*. -The description appears with your crate in search results and on your crate's -page. Descriptions are usually a sentence or two. The `license` field takes a -license identifier value, and the possible values have been specified by the -Linux Foundation's Software Package Data Exchange (SPDX) at *http://spdx.org/licenses/*. If you would -like to use a license that doesn't appear there, instead of the `license` key, -you can use `license-file` to specify the name of a file in your project that -contains the text of the license you want to use. +Make a description that’s just a sentence or two, as it will appear with your +crate in search results and on your crate’s page. For the `license` field, you +need to give a *license identifier value*. The Linux Foundation’s Software +Package Data Exchange (SPDX) at *http://spdx.org/licenses/* lists the +identifiers you can use for this value. For example, to specify that you’ve +licensed your crate using the MIT License, add the `MIT` identifier: + +``` +[package] +name = "guessing_game" +license = "MIT" +``` + + + + +If you want to use a license that doesn’t appear in the SPDX, you need to place +the text of that license in a file, include the file in your project, then use +`license-file` to specify the name of that file instead of using the `license` +key. Guidance on which license is right for your project is out of scope for this book. Many people in the Rust community choose to license their projects in the -same way as Rust itself, with a dual license of `MIT/Apache-2.0`, which -demonstrates that you can specify multiple license identifiers separated by a -slash. So the *Cargo.toml* for a project that is ready to publish might look -like this: +same way as Rust itself, with a dual license of `MIT/Apache-2.0`---this +demonstrates that you can also specify multiple license identifiers separated +by a slash. 
+So, with a unique name, the version, and author details that `cargo new` added +when you created the crate, your description, and the license you chose added, +the *Cargo.toml* for a project that’s ready to publish might look like this: -```toml +``` [package] name = "guessing_game" version = "0.1.0" @@ -396,134 +611,160 @@ license = "MIT/Apache-2.0" [dependencies] ``` -Be sure to check out the documentation on crates.io at *http://doc.crates.io/manifest.html#package-metadata* that -describes other metadata you can specify to ensure your crate can be discovered -and used more easily! - +Cargo's documentation at *http://doc.rust-lang.org/cargo/* describes other +metadata you can specify to ensure your crate can be discovered and used more +easily! ### Publishing to Crates.io -Now that we've created an account, saved our API token, chosen a name for our -crate, and specified the required metadata, we're ready to publish! Publishing -a crate is when a specific version is uploaded to be hosted on crates.io. +Now that you’ve created an account, saved your API token, chosen a name for +your crate, and specified the required metadata, you’re ready to publish! +Publishing a crate uploads a specific version to crates.io for others to use. -Take care when publishing a crate, because a publish is **permanent**. The -version can never be overwritten, and the code cannot be deleted. However, -there is no limit to the number of versions which can be published. +Take care when publishing a crate, because a publish is *permanent*. The +version can never be overwritten, and the code cannot be deleted. One major +goal of Crates.io is to act as a permanent archive of code so that builds of +all projects that depend on crates from Crates.io will continue to work. +Allowing deletion of versions would make fulfilling that goal impossible. +However, there is no limit to the number of versions of a crate you can publish. -Let's run the `cargo publish` command, which should succeed this time since -we've now specified the required metadata: +Let’s run the `cargo publish` command again. It should succeed now: -```text +``` $ cargo publish Updating registry `https://github.com/rust-lang/crates.io-index` Packaging guessing_game v0.1.0 (file:///projects/guessing_game) Verifying guessing_game v0.1.0 (file:///projects/guessing_game) Compiling guessing_game v0.1.0 (file:///projects/guessing_game/target/package/guessing_game-0.1.0) - Finished debug [unoptimized + debuginfo] target(s) in 0.19 secs + Finished dev [unoptimized + debuginfo] target(s) in 0.19 secs Uploading guessing_game v0.1.0 (file:///projects/guessing_game) ``` -Congratulations! You've now shared your code with the Rust community, and -anyone can easily add your crate as a dependency to their project. +Congratulations! You’ve now shared your code with the Rust community, and +anyone can easily add your crate as a dependency of their project. ### Publishing a New Version of an Existing Crate -When you've made changes to your crate and are ready to release a new version, -change the `version` value specified in your *Cargo.toml*. Use the Semantic -Versioning rules at *http://semver.org/* to decide what an appropriate next version number is -based on the kinds of changes you've made. Then run `cargo publish` to upload -the new version. +When you’ve made changes to your crate and are ready to release a new version, +you change the `version` value specified in your *Cargo.toml* and republish. 
+Use the Semantic Versioning rules at *http://semver.org/* to decide what an appropriate next +version number is based on the kinds of changes you’ve made. Then run `cargo +publish` to upload the new version. ### Removing Versions from Crates.io with `cargo yank` -Occasions may arise where you publish a version of a crate that actually ends -up being broken for one reason or another, such as a syntax error or forgetting -to include a file. For situations such as this, Cargo supports *yanking* a -version of a crate. - -Marking a version of a crate as yanked means that no projects will be able to -start depending on that version, but all existing projects that depend on that -version will continue to be allowed to download and depend on that version. One -of the major goals of crates.io is to act as a permanent archive of code so -that builds of all projects will continue to work, and allowing deletion of a -version would go against this goal. Essentially, a yank means that all projects -with a *Cargo.lock* will not break, while any future *Cargo.lock* files -generated will not use the yanked version. - -A yank **does not** delete any code. The yank feature is not intended for -deleting accidentally uploaded secrets, for example. If that happens, you must -reset those secrets immediately. +While you can’t remove previous versions of a crate, you can prevent any future +projects from adding them as a new dependency. This is useful when a version of +a crate ends up being broken for one reason or another. For situations such as +this, Cargo supports *yanking* a version of a crate. + +Yanking a version prevents new projects from starting to depend on that +version while allowing all existing projects that depend on it to continue to +download and depend on that version. Essentially, a yank means that all +projects with a *Cargo.lock* will not break, while any future *Cargo.lock* +files generated will not use the yanked version. To yank a version of a crate, run `cargo yank` and specify which version you want to yank: -```text +``` $ cargo yank --vers 1.0.1 ``` You can also undo a yank, and allow projects to start depending on a version again, by adding `--undo` to the command: -```text +``` $ cargo yank --vers 1.0.1 --undo ``` +A yank *does not* delete any code. The yank feature is not intended for +deleting accidentally uploaded secrets, for example. If that happens, you must +reset those secrets immediately. + ## Cargo Workspaces In Chapter 12, we built a package that included both a binary crate and a -library crate. But what if the library crate continues to get bigger and we -want to split our package up further into multiple library crates? As packages -grow, separating out major components can be quite useful. In this situation, -Cargo has a feature called *workspaces* that can help us manage multiple -related packages that are developed in tandem. +library crate. You may find, as your project develops, that the library crate +continues to get bigger and you want to split your package up further into +multiple library crates. In this situation, Cargo has a feature called +*workspaces* that can help manage multiple related packages that are developed +in tandem. A *workspace* is a set of packages that will all share the same *Cargo.lock* -and output directory. Let's make a project using a workspace where the code -will be trivial so that we can concentrate on the structure of a workspace. 
-We'll have a binary that uses two libraries: one that will provide an `add_one` -method and a second that will provide an `add_two` method. Let's start by -creating a new crate for the binary: +and output directory. Let’s make a project using a workspace, using trivial +code so we can concentrate on the structure of a workspace. We’ll have a binary +that uses two libraries: one library that will provide an `add_one` function +and a second library that will provide an `add_two` function. These three +crates will all be part of the same workspace. We’ll start by creating a new +crate for the binary: -```text +``` $ cargo new --bin adder Created binary (application) `adder` project $ cd adder ``` -We need to modify the binary package's *Cargo.toml* to tell Cargo the `adder` -package is a workspace. Add this at the bottom of the file: +We need to modify the binary package’s *Cargo.toml* and add a `[workspace]` +section to tell Cargo the `adder` package is a workspace. Add this at the +bottom of the file: -```toml +``` [workspace] ``` Like many Cargo features, workspaces support convention over configuration: we -don't need to say anything more than this as long as we follow the convention. -The convention is that any crates that we depend on as sub-directories will be -part of the workspace. Let's add a path dependency to the `adder` crate by -changing the `[dependencies]` section of *Cargo.toml* to look like this: +don’t need to add anything more than this to *Cargo.toml* to define our +workspace as long as we follow the convention. + + + + +### Specifying Workspace Dependencies + +The workspace convention says any crates in any subdirectories that the +top-level crate depends on are part of the workspace. Any crate, whether in a +workspace or not, can specify that it has a dependency on a crate in a local +directory by using the `path` attribute on the dependency specification in +*Cargo.toml*. If a crate has the `[workspace]` key and we specify path +dependencies where the paths are subdirectories of the crate’s directory, those +dependent crates will be considered part of the workspace. Let’s specify in the +*Cargo.toml* for the top-level `adder` crate that it will have a dependency on +an `add-one` crate that will be in the `add-one` subdirectory, by changing +*Cargo.toml* to look like this: + + + -```toml +``` [dependencies] add-one = { path = "add-one" } ``` -If we add dependencies that don't have a `path` specified, those will be normal -dependencies that aren't in this workspace. +If we add dependencies to *Cargo.toml* that don’t have a `path` specified, +those dependencies will be normal dependencies that aren’t in this workspace +and are assumed to come from Crates.io. 
-Next, generate the `add-one` crate within the `adder` directory: +### Creating the Second Crate in the Workspace -```text + + + +Next, while in the `adder` directory, generate an `add-one` crate: + +``` $ cargo new add-one Created library `add-one` project ``` Your `adder` directory should now have these directories and files: -```text +``` ├── Cargo.toml ├── add-one │   ├── Cargo.toml @@ -533,21 +774,24 @@ Your `adder` directory should now have these directories and files: └── main.rs ``` -In *add-one/src/lib.rs*, let's add an implementation of an `add_one` function: +In *add-one/src/lib.rs*, let’s add an `add_one` function: -Filename: add-one/src/lib.rs +Filename: add-one/src/lib.rs -```rust +``` pub fn add_one(x: i32) -> i32 { x + 1 } ``` -Open up *src/main.rs* for `adder` and add an `extern crate` line to bring the -new `add-one` library crate into scope, and change the `main` function to use -the `add_one` function: + + + +Open up *src/main.rs* for `adder` and add an `extern crate` line at the top of +the file to bring the new `add-one` library crate into scope. Then change the +`main` function to call the `add_one` function, as in Listing 14-12: -```rust,ignore +``` extern crate add_one; fn main() { @@ -556,36 +800,77 @@ fn main() { } ``` -Let's build it! +Listing 14-12: Using the `add-one` library crate from the `adder` crate -```text +Let’s build the `adder` crate by running `cargo build` in the *adder* directory! + +``` $ cargo build Compiling add-one v0.1.0 (file:///projects/adder/add-one) Compiling adder v0.1.0 (file:///projects/adder) - Finished debug [unoptimized + debuginfo] target(s) in 0.68 secs + Finished dev [unoptimized + debuginfo] target(s) in 0.68 secs ``` -Note that running `cargo build` in the *adder* directory built both that crate -and the `add-one` crate in *adder/add-one*, but created only one *Cargo.lock* -and one *target* directory, both in the *adder* directory. See if you can add -an `add-two` crate in the same way. +Note that this builds both the `adder` crate and the `add-one` crate in +*adder/add-one*. Now your *adder* directory should have these files: -Let's now say that we'd like to use the `rand` crate in our `add-one` crate. -As usual, we'll add it to the `[dependencies]` section in the `Cargo.toml` for -that crate: +``` +├── Cargo.lock +├── Cargo.toml +├── add-one +│   ├── Cargo.toml +│   └── src +│   └── lib.rs +├── src +│   └── main.rs +└── target +``` -Filename: add-one/Cargo.toml +The workspace has one *target* directory at the top level; *add-one* doesn’t +have its own *target* directory. Even if we go into the `add-one` directory and +run `cargo build`, the compiled artifacts end up in *adder/target* rather than +*adder/add-one/target*. The crates in a workspace depend on each other. If each +crate had its own *target* directory, each crate in the workspace would have to +recompile each other crate in the workspace in order to have the artifacts in +its own *target* directory. By sharing one *target* directory, the crates in +the workspace can avoid rebuilding the other crates in the workspace more than +necessary. + + + + +#### Depending on an External Crate in a Workspace + +Also notice the workspace only has one *Cargo.lock*, rather than having a +top-level *Cargo.lock* and *add-one/Cargo.lock*. This ensures that all crates +are using the same version of all dependencies. 
If we add the `rand` crate to +both *Cargo.toml* and *add-one/Cargo.toml*, Cargo will resolve both of those to +one version of `rand` and record that in the one *Cargo.lock*. Making all +crates in the workspace use the same dependencies means the crates in the +workspace will always be compatible with each other. Let’s try this out now. + +Let’s add the `rand` crate to the `[dependencies]` section in +*add-one/Cargo.toml* in order to be able to use the `rand` crate in the +`add-one` crate: + +Filename: add-one/Cargo.toml -```toml +``` [dependencies] rand = "0.3.14" ``` -And if we add `extern crate rand;` to *add-one/src/lib.rs* then run `cargo -build`, it will succeed: +We can now add `extern crate rand;` to *add-one/src/lib.rs*, and building the +whole workspace by running `cargo build` in the *adder* directory will bring in +and compile the `rand` crate: -```text +``` $ cargo build Updating registry `https://github.com/rust-lang/crates.io-index` Downloading rand v0.3.14 @@ -593,16 +878,16 @@ $ cargo build Compiling rand v0.3.14 Compiling add-one v0.1.0 (file:///projects/adder/add-one) Compiling adder v0.1.0 (file:///projects/adder) - Finished debug [unoptimized + debuginfo] target(s) in 10.18 secs + Finished dev [unoptimized + debuginfo] target(s) in 10.18 secs ``` -The top level *Cargo.lock* now contains information about the dependency -`add-one` has on `rand`. However, even though `rand` is used somewhere in the -workspace, we can't use it in other crates in the workspace unless we add +The top level *Cargo.lock* now contains information about `add-one`’s +dependency on `rand`. However, even though `rand` is used somewhere in the +workspace, we can’t use it in other crates in the workspace unless we add `rand` to their *Cargo.toml* as well. If we add `extern crate rand;` to -*src/main.rs* for the top level `adder` crate, for example, we'll get an error: +*src/main.rs* for the top level `adder` crate, for example, we’ll get an error: -```text +``` $ cargo build Compiling adder v0.1.0 (file:///projects/adder) error[E0463]: can't find crate for `rand` @@ -612,15 +897,23 @@ error[E0463]: can't find crate for `rand` | ^^^^^^^^^^^^^^^^^^^ can't find crate ``` -To fix this, edit *Cargo.toml* for the top level and indicate that `rand` is a -dependency for the `adder` crate. +To fix this, edit *Cargo.toml* for the top level `adder` crate and indicate +that `rand` is a dependency for that crate as well. Building the `adder` crate +will add `rand` to the list of dependencies for `adder` in *Cargo.lock*, but no +additional copies of `rand` will be downloaded. Cargo has ensured for us that +any crate in the workspace using the `rand` crate will be using the same +version. Using the same version of `rand` across the workspace saves space +since we won’t have multiple copies and ensures that the crates in the +workspace will be compatible with each other. 
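To make the shared dependency concrete, here is a hedged sketch of how *add-one/src/lib.rs* might actually use `rand` once the `extern crate rand;` line is in place. The `add_one_noisy` function is invented for illustration and is not part of the chapter’s project; the two-argument `gen_range` form matches the 0.3 line of `rand` that the chapter pins.

```
extern crate rand;

use rand::Rng;

/// Adds one to `x`, then adds a random 0 or 1 on top (illustrative only).
pub fn add_one_noisy(x: i32) -> i32 {
    // `thread_rng` returns a random number generator local to the current
    // thread; `gen_range(0, 2)` picks a value in the half-open range [0, 2).
    let jitter = rand::thread_rng().gen_range(0, 2);
    x + 1 + jitter
}
```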
+ +#### Adding a Test to a Workspace -For another enhancement, let's add a test of the `add_one::add_one` function -within that crate: +For another enhancement, let’s add a test of the `add_one::add_one` function +within the `add_one` crate: -Filename: add-one/src/lib.rs +Filename: add-one/src/lib.rs -```rust +``` pub fn add_one(x: i32) -> i32 { x + 1 } @@ -638,10 +931,10 @@ mod tests { Now run `cargo test` in the top-level *adder* directory: -```text +``` $ cargo test Compiling adder v0.1.0 (file:///projects/adder) - Finished debug [unoptimized + debuginfo] target(s) in 0.27 secs + Finished dev [unoptimized + debuginfo] target(s) in 0.27 secs Running target/debug/adder-f0253159197f7841 running 0 tests @@ -650,51 +943,100 @@ test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ``` Wait a second, zero tests? We just added one! If we look at the output, we can -see that `cargo test` in a workspace only runs the tests for the top level -crate. To run tests for the other crates, we need to use the `-p` argument to -indicate we want to run tests for a particular package: +see that `cargo test` in a workspace only runs tests for the top level crate. +To run tests for all of the crates in the workspace, we need to pass the +`--all` flag: -```text -$ cargo test -p add-one - Finished debug [unoptimized + debuginfo] target(s) in 0.0 secs +``` +$ cargo test --all + Finished dev [unoptimized + debuginfo] target(s) in 0.37 secs Running target/debug/deps/add_one-abcabcabc running 1 test test tests::it_works ... ok -test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out + + Running target/debug/deps/adder-abcabcabc + +running 0 tests + +test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out Doc-tests add-one running 0 tests -test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured +test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out +``` + +When passing `--all`, `cargo test` will run the tests for all of the crates in +the workspace. We can also choose to run tests for one particular crate in a +workspace from the top level directory by using the `-p` flag and specifying +the name of the crate we want to test: + +``` +$ cargo test -p add-one + Finished dev [unoptimized + debuginfo] target(s) in 0.0 secs + Running target/debug/deps/add_one-b3235fea9a156f74 + +running 1 test +test tests::it_works ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out + + Doc-tests add-one + +running 0 tests + +test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out ``` -Similarly, if you choose to publish the workspace to crates.io, each crate in -the workspace will get published separately. +This output shows `cargo test` only ran the tests for the `add-one` crate and +didn’t run the `adder` crate tests. + +If you choose to publish the crates in the workspace to crates.io, each crate +in the workspace will get published separately. The `cargo publish` command +does not have an `--all` flag or a `-p` flag, so it is necessary to change to +each crate’s directory and run `cargo publish` on each crate in the workspace +in order to publish them. -As your project grows, consider a workspace: smaller components are easier to -understand individually than one big blob of code. Keeping the crates in a -workspace can make coordination among them easier if they work together and are -often changed at the same time. 
+ + + +Now try adding an `add-two` crate to this workspace in a similar way as the +`add-one` crate for some more practice! + +As your project grows, consider using a workspace: smaller components are +easier to understand individually than one big blob of code. Keeping the crates +in a workspace can make coordination among them easier if they work together +and are often changed at the same time. ## Installing Binaries from Crates.io with `cargo install` The `cargo install` command allows you to install and use binary crates -locally. This isn't intended to replace system packages; it's meant to be a +locally. This isn’t intended to replace system packages; it’s meant to be a convenient way for Rust developers to install tools that others have shared on -crates.io. Only packages which have binary targets can be installed, and all -binaries are installed into the installation root's *bin* folder. If you -installed Rust using *rustup.rs* and don't have any custom configurations, this -will be `$HOME/.cargo/bin`. Add that directory to your `$PATH` to be able to -run programs you've gotten through `cargo install`. - -For example, we mentioned in Chapter 12 that there's a Rust implementation of +crates.io. Only packages that have binary targets can be installed. A binary +target is the runnable program that gets created if the crate has a +*src/main.rs* or another file specified as a binary, as opposed to a library +target that isn’t runnable on its own but is suitable for including within +other programs. Usually, crates have information in the *README* file about +whether a crate is a library, has a binary target, or both. + + + + +All binaries from `cargo install` are put into the installation root’s *bin* +folder. If you installed Rust using *rustup.rs* and don’t have any custom +configurations, this will be `$HOME/.cargo/bin`. Add that directory to your +`$PATH` to be able to run programs you’ve gotten through `cargo install`. + +For example, we mentioned in Chapter 12 that there’s a Rust implementation of the `grep` tool for searching files called `ripgrep`. If we want to install `ripgrep`, we can run: -```text +``` $ cargo install ripgrep Updating registry `https://github.com/rust-lang/crates.io-index` Downloading ripgrep v0.3.2 @@ -705,23 +1047,23 @@ Updating registry `https://github.com/rust-lang/crates.io-index` ``` The last line of the output shows the location and the name of the installed -binary, which in the case of `ripgrep` is named `rg`. As long as the -installation directory is in our `$PATH` as mentioned above, we can then run -`rg --help` and start using a faster, rustier tool for searching files! +binary, which in the case of `ripgrep` is `rg`. As long as the installation +directory is in your `$PATH` as mentioned above, you can then run `rg --help` +and start using a faster, rustier tool for searching files! ## Extending Cargo with Custom Commands -Cargo is designed to be extensible with new subcommands without having to +Cargo is designed so you can extend it with new subcommands without having to modify Cargo itself. If a binary in your `$PATH` is named `cargo-something`, you can run it as if it were a Cargo subcommand by running `cargo something`. -Custom commands like this are also listed when you run `cargo --list`. It's -convenient to `cargo install` extensions to Cargo then be able to run them just -like the built-in Cargo tools! +Custom commands like this are also listed when you run `cargo --list`. 
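As a concrete illustration of that naming convention, a custom subcommand is just an ordinary binary crate whose installed binary is named `cargo-something`. The `cargo-hello` crate sketched here is an assumption for illustration, not an existing tool:

```
use std::env;

fn main() {
    // Cargo forwards the invocation's arguments to the external binary;
    // printing them is an easy way to see exactly what arrives.
    let args: Vec<String> = env::args().collect();
    println!("Hello from a custom Cargo subcommand!");
    println!("Arguments received: {:?}", &args[1..]);
}
```

Once a binary built from this is on `$PATH` under the name `cargo-hello` (for example via `cargo install`), running `cargo hello` invokes it.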
Being +able to `cargo install` extensions and then run them just like the built-in +Cargo tools is a super convenient benefit of Cargo’s design! ## Summary Sharing code with Cargo and crates.io is part of what makes the Rust ecosystem -useful for many different tasks. Rust's standard library is small and stable, -but crates are easy to share, use, and improve on a different timeline than the -language itself. Don't be shy about sharing code that's useful to you on -crates.io; it's likely that it will be useful to someone else as well! +useful for many different tasks. Rust’s standard library is small and stable, +but crates are easy to share, use, and improve on a timeline different from the +language itself. Don’t be shy about sharing code that’s useful to you on +Crates.io; it’s likely that it will be useful to someone else as well! diff --git a/src/doc/book/second-edition/nostarch/chapter15.md b/src/doc/book/second-edition/nostarch/chapter15.md index 759ad3ebb8..57fbc43c13 100644 --- a/src/doc/book/second-edition/nostarch/chapter15.md +++ b/src/doc/book/second-edition/nostarch/chapter15.md @@ -4,8 +4,8 @@ # Smart Pointers *Pointer* is a generic programming term for something that refers to a location -that stores some other data. We learned about Rust's references in Chapter 4; -they're a plain sort of pointer indicated by the `&` symbol and borrow the +that stores some other data. We learned about Rust’s references in Chapter 4; +they’re a plain sort of pointer indicated by the `&` symbol and borrow the value that they point to. *Smart pointers* are data structures that act like a pointer, but also have additional metadata and capabilities, such as reference counting. The smart pointer pattern originated in C++. In Rust, an additional @@ -13,35 +13,35 @@ difference between plain references and smart pointers is that references are a kind of pointer that only borrow data; by contrast, in many cases, smart pointers *own* the data that they point to. -We've actually already encountered a few smart pointers in this book, even -though we didn't call them that by name at the time. For example, in a certain +We’ve actually already encountered a few smart pointers in this book, even +though we didn’t call them that by name at the time. For example, in a certain sense, `String` and `Vec` from Chapter 8 are both smart pointers. They own some memory and allow you to manipulate it, and have metadata (like their capacity) and extra capabilities or guarantees (`String` data will always be valid UTF-8). The characteristics that distinguish a smart pointer from an ordinary struct are that smart pointers implement the `Deref` and `Drop` -traits, and in this chapter we'll be discussing both of those traits and why -they're important to smart pointers. +traits, and in this chapter we’ll be discussing both of those traits and why +they’re important to smart pointers. Given that the smart pointer pattern is a general design pattern used -frequently in Rust, this chapter won't cover every smart pointer that exists. +frequently in Rust, this chapter won’t cover every smart pointer that exists. Many libraries have their own and you may write some yourself. 
The ones we cover here are the most common ones from the standard library: * `Box`, for allocating values on the heap * `Rc`, a reference counted type so data can have multiple owners -* `RefCell`, which isn't a smart pointer itself, but manages access to the +* `RefCell`, which isn’t a smart pointer itself, but manages access to the smart pointers `Ref` and `RefMut` to enforce the borrowing rules at runtime instead of compile time -Along the way, we'll also cover: +Along the way, we’ll also cover: * The *interior mutability* pattern where an immutable type exposes an API for mutating an interior value, and the borrowing rules apply at runtime instead of compile time * Reference cycles, how they can leak memory, and how to prevent them -Let's dive in! +Let’s dive in! ## `Box` Points to Data on the Heap and Has a Known Size @@ -67,7 +67,7 @@ has ownership of data, when a box goes out of scope like `b` does at the end of `main`, it will be deallocated. The deallocation happens for both the box (stored on the stack) and the data it points to (stored on the heap). -Putting a single value on the heap isn't very useful, so you won't use boxes by +Putting a single value on the heap isn’t very useful, so you won’t use boxes by themselves in the way that Listing 15-1 does very often. A time when boxes are useful is when you want to ensure that your type has a known size. For example, consider Listing 15-2, which contains an enum definition for a *cons @@ -75,13 +75,13 @@ list*, a type of data structure that comes from functional programming. A cons list is a list where each item contains a value and the next item until the end of the list, which is signified by a value called `Nil`. Note that we -aren't introducing the idea of "nil" or "null" that we discussed in Chapter 6, -this is just a regular enum variant name we're using because it's the canonical -name to use when describing the cons list data structure. Cons lists aren't +aren’t introducing the idea of “nil” or “null” that we discussed in Chapter 6, +this is just a regular enum variant name we’re using because it’s the canonical +name to use when describing the cons list data structure. Cons lists aren’t used very often in Rust, `Vec` is a better choice most of the time, but implementing this data structure is useful as an example. -Here's our first try at defining a cons list as an enum; note that this won't +Here’s our first try at defining a cons list as an enum; note that this won’t compile quite yet: Filename: src/main.rs @@ -96,7 +96,7 @@ enum List { Listing 15-2: The first attempt of defining an enum to represent a cons list data structure of `i32` values -We're choosing to implement a cons list that only holds `i32` values, but we +We’re choosing to implement a cons list that only holds `i32` values, but we could have chosen to implement it using generics as we discussed in Chapter 10 to define a cons list concept independent of the type of value stored in the cons list. @@ -135,10 +135,10 @@ error[E0072]: recursive type `List` has infinite size Listing 15-3: The error we get when attempting to define a recursive enum -The error says this type 'has infinite size'. Why is that? It's because we've +The error says this type ‘has infinite size’. Why is that? It’s because we’ve defined `List` to have a variant that is recursive: it holds another value of -itself. This means Rust can't figure out how much space it needs in order to -store a `List` value. Let's break this down a bit: first let's look at how Rust +itself. 
This means Rust can’t figure out how much space it needs in order to +store a `List` value. Let’s break this down a bit: first let’s look at how Rust decides how much space it needs to store a value of a non-recursive type. Recall the `Message` enum we defined in Listing 6-2 when we discussed enum definitions in Chapter 6: @@ -172,7 +172,7 @@ as shown in Figure 15-4. Figure 15-4: An infinite `List` consisting of infinite `Cons` variants -Rust can't figure out how much space to allocate for recursively defined types, +Rust can’t figure out how much space to allocate for recursively defined types, so the compiler gives the error in Listing 15-3. The error did include this helpful suggestion: @@ -213,9 +213,9 @@ known size The compiler will be able to figure out the size it needs to store a `List` value. Rust will look at `List`, and again start by looking at the `Cons` variant. The `Cons` variant will need the size of `i32` plus the space to store -a `usize`, since a box always has the size of a `usize`, no matter what it's +a `usize`, since a box always has the size of a `usize`, no matter what it’s pointing to. Then Rust looks at the `Nil` variant, which does not store a -value, so `Nil` doesn't need any space. We've broken the infinite, recursive +value, so `Nil` doesn’t need any space. We’ve broken the infinite, recursive chain by adding in a box. Figure 15-6 shows what the `Cons` variant looks like now: @@ -224,14 +224,14 @@ now: Figure 15-6: A `List` that is not infinitely sized since `Cons` holds a `Box` This is the main area where boxes are useful: breaking up an infinite data -structure so that the compiler can know what size it is. We'll look at another +structure so that the compiler can know what size it is. We’ll look at another case where Rust has data of unknown size in Chapter 17 when we discuss trait objects. -Even though you won't be using boxes very often, they are a good way to +Even though you won’t be using boxes very often, they are a good way to understand the smart pointer pattern. Two of the aspects of `Box` that are commonly used with smart pointers are its implementations of the `Deref` trait -and the `Drop` trait. Let's investigate how these traits work and how smart +and the `Drop` trait. Let’s investigate how these traits work and how smart pointers use them. ## The `Deref` Trait Allows Access to the Data Through a Reference @@ -239,13 +239,13 @@ pointers use them. The first important smart pointer-related trait is `Deref`, which allows us to override `*`, the dereference operator (as opposed to the multiplication operator or the glob operator). Overriding `*` for smart pointers makes -accessing the data behind the smart pointer convenient, and we'll talk about +accessing the data behind the smart pointer convenient, and we’ll talk about what we mean by convenient when we get to deref coercions later in this section. We briefly mentioned the dereference operator in Chapter 8, in the hash map -section titled "Update a Value Based on the Old Value". We had a mutable +section titled “Update a Value Based on the Old Value”. We had a mutable reference, and we wanted to change the value that the reference was pointing -to. In order to do that, first we had to dereference the reference. Here's +to. In order to do that, first we had to dereference the reference. Here’s another example using references to `i32` values: ``` @@ -263,7 +263,7 @@ We use `*y` to access the data that the mutable reference in `y` refers to, rather than the mutable reference itself. 
We can then modify that data, in this case by adding 1. -With references that aren't smart pointers, there's only one value that the +With references that aren’t smart pointers, there’s only one value that the reference is pointing to, so the dereference operation is straightforward. Smart pointers can also store metadata about the pointer or the data. When dereferencing a smart pointer, we only want the data, not the metadata, since @@ -272,7 +272,7 @@ to be able to use smart pointers in the same places that we can use regular references. To enable that, we can override the behavior of the `*` operator by implementing the `Deref` trait. -Listing 15-7 has an example of overriding `*` using `Deref` on a struct we've +Listing 15-7 has an example of overriding `*` using `Deref` on a struct we’ve defined to hold mp3 data and metadata. `Mp3` is, in a sense, a smart pointer: it owns the `Vec` data containing the audio. In addition, it holds some optional metadata, in this case the artist and title of the song in the audio @@ -317,17 +317,17 @@ file data and metadata Most of this should look familiar: a struct, a trait implementation, and a main function that creates an instance of the struct. There is one part we -haven't explained thoroughly yet: similarly to Chapter 13 when we looked at the +haven’t explained thoroughly yet: similarly to Chapter 13 when we looked at the Iterator trait with the `type Item`, the `type Target = T;` syntax is defining -an associated type, which is covered in more detail in Chapter 19. Don't worry +an associated type, which is covered in more detail in Chapter 19. Don’t worry about that part of the example too much; it is a slightly different way of declaring a generic parameter. -In the `assert_eq!`, we're verifying that `vec![1, 2, 3]` is the result we get +In the `assert_eq!`, we’re verifying that `vec![1, 2, 3]` is the result we get when dereferencing the `Mp3` instance with `*my_favorite_song`, which is what happens since we implemented the `deref` method to return the audio data. If -we hadn't implemented the `Deref` trait for `Mp3`, Rust wouldn't compile the -code `*my_favorite_song`: we'd get an error saying type `Mp3` cannot be +we hadn’t implemented the `Deref` trait for `Mp3`, Rust wouldn’t compile the +code `*my_favorite_song`: we’d get an error saying type `Mp3` cannot be dereferenced. The reason this code works is that what the `*` operator is doing behind @@ -339,14 +339,14 @@ the scenes when we call `*my_favorite_song` is: This calls the `deref` method on `my_favorite_song`, which borrows `my_favorite_song` and returns a reference to `my_favorite_song.audio`, since -that's what we defined `deref` to do in Listing 15-5. `*` on references is +that’s what we defined `deref` to do in Listing 15-5. `*` on references is defined to just follow the reference and return the data, so the expansion of -`*` doesn't recurse for the outer `*`. So we end up with data of type +`*` doesn’t recurse for the outer `*`. So we end up with data of type `Vec`, which matches the `vec![1, 2, 3]` in the `assert_eq!` in Listing 15-5. The reason that the return type of the `deref` method is still a reference and -why it's necessary to dereference the result of the method is that if the +why it’s necessary to dereference the result of the method is that if the `deref` method returned just the value, using `*` would always take ownership. 
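Listing 15-7 itself is not reproduced above, so here is a minimal sketch consistent with the surrounding description of `Mp3`: a struct that owns the audio bytes plus optional metadata, with a `Deref` implementation whose `deref` method returns a reference to the `audio` field. The field names and the sample values are assumptions taken from that description:

```
use std::ops::Deref;

struct Mp3 {
    audio: Vec<u8>,
    artist: Option<String>,
    title: Option<String>,
}

impl Deref for Mp3 {
    // An associated type: a slightly different way of declaring a
    // generic parameter, as the text notes.
    type Target = Vec<u8>;

    fn deref(&self) -> &Vec<u8> {
        &self.audio
    }
}

fn main() {
    let my_favorite_song = Mp3 {
        // placeholder bytes standing in for real audio data
        audio: vec![1, 2, 3],
        artist: Some(String::from("Nirvana")),
        title: Some(String::from("Smells Like Teen Spirit")),
    };

    // *my_favorite_song expands to *(my_favorite_song.deref()),
    // which yields the Vec<u8> stored in the audio field.
    assert_eq!(vec![1, 2, 3], *my_favorite_song);
}
```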
### Implicit Deref Coercions with Functions and Methods @@ -354,14 +354,14 @@ why it's necessary to dereference the result of the method is that if the Rust tends to favor explicitness over implicitness, but one case where this does not hold true is *deref coercions* of arguments to functions and methods. A deref coercion will automatically convert a reference to a pointer or a smart -pointer into a reference to that pointer's contents. A deref coercion happens -when a value is passed to a function or method, and only happens if it's needed +pointer into a reference to that pointer’s contents. A deref coercion happens +when a value is passed to a function or method, and only happens if it’s needed to get the type of the value passed in to match the type of the parameter defined in the signature. Deref coercion was added to Rust to make calling functions and methods not need as many explicit references and dereferences with `&` and `*`. -Using our `Mp3` struct from Listing 15-5, here's the signature of a function to +Using our `Mp3` struct from Listing 15-5, here’s the signature of a function to compress mp3 audio data that takes a slice of `u8`: ``` @@ -370,16 +370,16 @@ fn compress_mp3(audio: &[u8]) -> Vec { } ``` -If Rust didn't have deref coercion, in order to call this function with the -audio data in `my_favorite_song`, we'd have to write: +If Rust didn’t have deref coercion, in order to call this function with the +audio data in `my_favorite_song`, we’d have to write: ``` compress_mp3(my_favorite_song.audio.as_slice()) ``` -That is, we'd have to explicitly say that we want the data in the `audio` field +That is, we’d have to explicitly say that we want the data in the `audio` field of `my_favorite_song` and that we want a slice referring to the whole -`Vec`. If there were a lot of places where we'd want process the `audio` +`Vec`. If there were a lot of places where we’d want process the `audio` data in a similar manner, `.audio.as_slice()` would be wordy and repetitive. However, because of deref coercion and our implementation of the `Deref` trait @@ -400,11 +400,11 @@ documentation for `Vec`). So, at compile time, Rust will see that it can use `Deref::deref` twice to turn `&Mp3` into `&Vec` and then into `&[T]` to match the signature of `compress_mp3`. That means we get to do less typing! Rust will analyze types through `Deref::deref` as many times as it needs to in -order to get a reference to match the parameter's type, when the `Deref` trait +order to get a reference to match the parameter’s type, when the `Deref` trait is defined for the types involved. The indirection is resolved at compile time, so there is no run-time penalty for taking advantage of deref coercion. -There's also a `DerefMut` trait for overriding `*` on `&mut T` for use in +There’s also a `DerefMut` trait for overriding `*` on `&mut T` for use in assignment in the same fashion that we use `Deref` to override `*` on `&T`s. Rust does deref coercion when it finds types and trait implementations in three @@ -422,17 +422,17 @@ possible though: immutable references will never coerce to mutable ones. The reason that the `Deref` trait is important to the smart pointer pattern is that smart pointers can then be treated like regular references and used in -places that expect regular references. We don't have to redefine methods and +places that expect regular references. We don’t have to redefine methods and functions to take smart pointers explicitly, for example. 
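To make the coercion chain concrete, here is a small sketch that assumes the `Mp3` struct and its `Deref` implementation from the sketch earlier in this section are in scope; the body of `compress_mp3` is only a placeholder. Because `&Mp3` coerces to `&Vec<u8>` and then to `&[u8]`, the call site needs no explicit `.audio.as_slice()`:

```
fn compress_mp3(audio: &[u8]) -> Vec<u8> {
    // placeholder: a real implementation would actually compress the audio
    audio.to_vec()
}

fn main() {
    let my_favorite_song = Mp3 {
        audio: vec![1, 2, 3],
        artist: None,
        title: None,
    };

    // Deref coercion: &Mp3 -> &Vec<u8> -> &[u8]
    let compressed = compress_mp3(&my_favorite_song);
    assert_eq!(compressed, vec![1, 2, 3]);
}
```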
## The `Drop` Trait Runs Code on Cleanup -The other trait that's important to the smart pointer pattern is the `Drop` +The other trait that’s important to the smart pointer pattern is the `Drop` trait. `Drop` lets us run some code when a value is about to go out of scope. Smart pointers perform important cleanup when being dropped, like deallocating memory or decrementing a reference count. More generally, data types can manage resources beyond memory, like files or network connections, and use `Drop` to -release those resources when our code is done with them. We're discussing +release those resources when our code is done with them. We’re discussing `Drop` in the context of smart pointers, though, because the functionality of the `Drop` trait is almost always used when implementing smart pointers. @@ -440,19 +440,19 @@ In some other languages, we have to remember to call code to free the memory or resource every time we finish using an instance of a smart pointer. If we forget, the system our code is running on might get overloaded and crash. In Rust, we can specify that some code should be run when a value goes out of -scope, and the compiler will insert this code automatically. That means we don't -need to remember to put this code everywhere we're done with an instance of -these types, but we still won't leak resources! +scope, and the compiler will insert this code automatically. That means we don’t +need to remember to put this code everywhere we’re done with an instance of +these types, but we still won’t leak resources! The way we specify code should be run when a value goes out of scope is by implementing the `Drop` trait. The `Drop` trait requires us to implement one method named `drop` that takes a mutable reference to `self`. -Listing 15-8 shows a `CustomSmartPointer` struct that doesn't actually do -anything, but we're printing out `CustomSmartPointer created.` right after we +Listing 15-8 shows a `CustomSmartPointer` struct that doesn’t actually do +anything, but we’re printing out `CustomSmartPointer created.` right after we create an instance of the struct and `Dropping CustomSmartPointer!` when the instance goes out of scope so that we can see when each piece of code gets run. -Instead of a `println!` statement, you'd fill in `drop` with whatever cleanup +Instead of a `println!` statement, you’d fill in `drop` with whatever cleanup code your smart pointer needs to run: Filename: src/main.rs @@ -478,15 +478,15 @@ fn main() { Listing 15-8: A `CustomSmartPointer` struct that implements the `Drop` trait, where we could put code that would clean up after the `CustomSmartPointer`. -The `Drop` trait is in the prelude, so we don't need to import it. The `drop` -method implementation calls the `println!`; this is where you'd put the actual +The `Drop` trait is in the prelude, so we don’t need to import it. The `drop` +method implementation calls the `println!`; this is where you’d put the actual code needed to close the socket. In `main`, we create a new instance of `CustomSmartPointer` then print out `CustomSmartPointer created.` to be able to see that our code got to that point at runtime. At the end of `main`, our -instance of `CustomSmartPointer` will go out of scope. Note that we didn't call +instance of `CustomSmartPointer` will go out of scope. Note that we didn’t call the `drop` method explicitly. -When we run this program, we'll see: +When we run this program, we’ll see: ``` CustomSmartPointer created. 
@@ -498,11 +498,11 @@ printed to the screen, which shows that Rust automatically called `drop` for us when our instance went out of scope. We can use the `std::mem::drop` function to drop a value earlier than when it -goes out of scope. This isn't usually necessary; the whole point of the `Drop` -trait is that it's taken care of automatically for us. We'll see an example of -a case when we'll need to drop a value earlier than when it goes out of scope -in Chapter 16 when we're talking about concurrency. For now, let's just see -that it's possible, and `std::mem::drop` is in the prelude so we can just call +goes out of scope. This isn’t usually necessary; the whole point of the `Drop` +trait is that it’s taken care of automatically for us. We’ll see an example of +a case when we’ll need to drop a value earlier than when it goes out of scope +in Chapter 16 when we’re talking about concurrency. For now, let’s just see +that it’s possible, and `std::mem::drop` is in the prelude so we can just call `drop` as shown in Listing 15-9: Filename: src/main.rs @@ -529,13 +529,13 @@ Dropping CustomSmartPointer! Wait for it... ``` -Note that we aren't allowed to call the `drop` method that we defined directly: -if we replaced `drop(c)` in Listing 15-9 with `c.drop()`, we'll get a compiler -error that says `explicit destructor calls not allowed`. We're not allowed to +Note that we aren’t allowed to call the `drop` method that we defined directly: +if we replaced `drop(c)` in Listing 15-9 with `c.drop()`, we’ll get a compiler +error that says `explicit destructor calls not allowed`. We’re not allowed to call `Drop::drop` directly because when Rust inserts its call to `Drop::drop` automatically when the value goes out of scope, then the value would get dropped twice. Dropping a value twice could cause an error or corrupt memory, -so Rust doesn't let us. Instead, we can use `std::mem::drop`, whose definition +so Rust doesn’t let us. Instead, we can use `std::mem::drop`, whose definition is: ``` @@ -547,28 +547,28 @@ pub mod std { ``` This function is generic over any type `T`, so we can pass any value to it. The -function doesn't actually have anything in its body, so it doesn't use its +function doesn’t actually have anything in its body, so it doesn’t use its parameter. The reason this empty function is useful is that `drop` takes ownership of its parameter, which means the value in `x` gets dropped at the end of this function when `x` goes out of scope. Code specified in a `Drop` trait implementation can be used for many reasons to make cleanup convenient and safe: we could use it to create our own memory -allocator, for instance! By using the `Drop` trait and Rust's ownership system, -we don't have to remember to clean up after ourselves since Rust takes care of -it automatically. We'll get compiler errors if we write code that would clean -up a value that's still in use, since the ownership system that makes sure +allocator, for instance! By using the `Drop` trait and Rust’s ownership system, +we don’t have to remember to clean up after ourselves since Rust takes care of +it automatically. We’ll get compiler errors if we write code that would clean +up a value that’s still in use, since the ownership system that makes sure references are always valid will also make sure that `drop` only gets called one time when the value is no longer being used. 
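The `CustomSmartPointer` listings (15-8 and 15-9) are mostly not reproduced above, so here is a minimal sketch consistent with the description and with the output shown: a struct with a `Drop` implementation, plus an early drop through the `std::mem::drop` function from the prelude. The `data` field and message text follow the description; real cleanup code would replace the `println!`:

```
struct CustomSmartPointer {
    data: String,
}

impl Drop for CustomSmartPointer {
    fn drop(&mut self) {
        // This is where real cleanup (closing a socket, releasing memory,
        // and so on) would go.
        println!("Dropping CustomSmartPointer!");
    }
}

fn main() {
    let c = CustomSmartPointer { data: String::from("some data") };
    println!("CustomSmartPointer created.");

    // std::mem::drop takes ownership of c, so the value is cleaned up here
    // rather than at the end of main. Calling c.drop() directly would not
    // compile: explicit destructor calls are not allowed.
    drop(c);

    println!("Wait for it...");
}
```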
-Now that we've gone over `Box` and some of the characteristics of smart -pointers, let's talk about a few other smart pointers defined in the standard +Now that we’ve gone over `Box` and some of the characteristics of smart +pointers, let’s talk about a few other smart pointers defined in the standard library that add different kinds of useful functionality. ## `Rc`, the Reference Counted Smart Pointer In the majority of cases, ownership is very clear: you know exactly which -variable owns a given value. However, this isn't always the case; sometimes, +variable owns a given value. However, this isn’t always the case; sometimes, you may actually need multiple owners. For this, Rust has a type called `Rc`. Its name is an abbreviation for *reference counting*. Reference counting means keeping track of the number of references to a value in order to @@ -576,15 +576,15 @@ know if a value is still in use or not. If there are zero references to a value, we know we can clean up the value without any references becoming invalid. -To think about this in terms of a real-world scenario, it's like a TV in a +To think about this in terms of a real-world scenario, it’s like a TV in a family room. When one person comes in the room to watch TV, they turn it on. Others can also come in the room and watch the TV. When the last person leaves -the room, they'll turn the TV off since it's no longer being used. If someone +the room, they’ll turn the TV off since it’s no longer being used. If someone turns off the TV while others are still watching it, though, the people watching the TV would get mad! `Rc` is for use when we want to allocate some data on the heap for multiple -parts of our program to read, and we can't determine at compile time which part +parts of our program to read, and we can’t determine at compile time which part of our program using this data will finish using it last. If we knew which part would finish last, we could make that part the owner of the data and the normal ownership rules enforced at compile time would kick in. @@ -592,12 +592,12 @@ ownership rules enforced at compile time would kick in. Note that `Rc` is only for use in single-threaded scenarios; the next chapter on concurrency will cover how to do reference counting in multithreaded programs. If you try to use `Rc` with multiple threads, -you'll get a compile-time error. +you’ll get a compile-time error. ### Using `Rc` to Share Data -Let's return to our cons list example from Listing 15-5. In Listing 15-11, we're -going to try to use `List` as we defined it using `Box`. First we'll create +Let’s return to our cons list example from Listing 15-5. In Listing 15-11, we’re +going to try to use `List` as we defined it using `Box`. First we’ll create one list instance that contains 5 and then 10. 
Next, we want to create two more lists: one that starts with 3 and continues on to our first list containing 5 and 10, then another list that starts with 4 and *also* continues on to our @@ -609,7 +609,7 @@ Figure 15-10: Figure 15-10: Two lists, `b` and `c`, sharing ownership of a third list, `a` -Trying to implement this using our definition of `List` with `Box` won't +Trying to implement this using our definition of `List` with `Box` won’t work, as shown in Listing 15-11: Filename: src/main.rs @@ -632,7 +632,7 @@ fn main() { ``` Listing 15-11: Having two lists using `Box` that try to share ownership of a -third list won't work +third list won’t work If we compile this, we get this error: @@ -651,12 +651,12 @@ error[E0382]: use of moved value: `a` The `Cons` variants own the data they hold, so when we create the `b` list it moves `a` to be owned by `b`. Then when we try to use `a` again when creating -`c`, we're not allowed to since `a` has been moved. +`c`, we’re not allowed to since `a` has been moved. We could change the definition of `Cons` to hold references instead, but then -we'd have to specify lifetime parameters and we'd have to construct elements of +we’d have to specify lifetime parameters and we’d have to construct elements of a list such that every element lives at least as long as the list itself. -Otherwise, the borrow checker won't even let us compile the code. +Otherwise, the borrow checker won’t even let us compile the code. Instead, we can change our definition of `List` to use `Rc` instead of `Box` as shown here in Listing 15-12: @@ -681,19 +681,19 @@ fn main() { Listing 15-12: A definition of `List` that uses `Rc` -Note that we need to add a `use` statement for `Rc` because it's not in the +Note that we need to add a `use` statement for `Rc` because it’s not in the prelude. In `main`, we create the list holding 5 and 10 and store it in a new `Rc` in `a`. Then when we create `b` and `c`, we call the `clone` method on `a`. ### Cloning an `Rc` Increases the Reference Count -We've seen the `clone` method previously, where we used it for making a -complete copy of some data. With `Rc`, though, it doesn't make a full copy. +We’ve seen the `clone` method previously, where we used it for making a +complete copy of some data. With `Rc`, though, it doesn’t make a full copy. `Rc` holds a *reference count*, that is, a count of how many clones exist. -Let's change `main` as shown in Listing 15-13 to have an inner scope around +Let’s change `main` as shown in Listing 15-13 to have an inner scope around where we create `c`, and to print out the results of the `Rc::strong_count` associated function at various points. `Rc::strong_count` returns the reference -count of the `Rc` value we pass to it, and we'll talk about why this function +count of the `Rc` value we pass to it, and we’ll talk about why this function is named `strong_count` in the section later in this chapter about preventing reference cycles. @@ -724,10 +724,10 @@ rc after creating c = 3 rc after c goes out of scope = 2 ``` -We're able to see that `a` has an initial reference count of one. Then each +We’re able to see that `a` has an initial reference count of one. Then each time we call `clone`, the count goes up by one. When `c` goes out of scope, the count is decreased by one, which happens in the implementation of the `Drop` -trait for `Rc`. What we can't see in this example is that when `b` and then +trait for `Rc`. 
What we can’t see in this example is that when `b` and then `a` go out of scope at the end of `main`, the count of references to the list containing 5 and 10 is then 0, and the list is dropped. This strategy lets us have multiple owners, as the count will ensure that the value remains valid as @@ -736,9 +736,9 @@ long as any of the owners still exist. In the beginning of this section, we said that `Rc` only allows you to share data for multiple parts of your program to read through immutable references to the `T` value the `Rc` contains. If `Rc` let us have a mutable reference, -we'd run into the problem that the borrowing rules disallow that we discussed +we’d run into the problem that the borrowing rules disallow that we discussed in Chapter 4: two mutable borrows to the same place can cause data races and -inconsistencies. But mutating data is very useful! In the next section, we'll +inconsistencies. But mutating data is very useful! In the next section, we’ll discuss the interior mutability pattern and the `RefCell` type that we can use in conjunction with an `Rc` to work with this restriction on immutability. @@ -748,58 +748,58 @@ immutability. *Interior mutability* is a design pattern in Rust for allowing you to mutate data even though there are immutable references to that data, which would normally be disallowed by the borrowing rules. The interior mutability pattern -involves using `unsafe` code inside a data structure to bend Rust's usual rules -around mutation and borrowing. We haven't yet covered unsafe code; we will in +involves using `unsafe` code inside a data structure to bend Rust’s usual rules +around mutation and borrowing. We haven’t yet covered unsafe code; we will in Chapter 19. The interior mutability pattern is used when you can ensure that -the borrowing rules will be followed at runtime, even though the compiler can't +the borrowing rules will be followed at runtime, even though the compiler can’t ensure that. The `unsafe` code involved is then wrapped in a safe API, and the outer type is still immutable. -Let's explore this by looking at the `RefCell` type that follows the +Let’s explore this by looking at the `RefCell` type that follows the interior mutability pattern. ### `RefCell` has Interior Mutability Unlike `Rc`, the `RefCell` type represents single ownership over the data that it holds. So, what makes `RefCell` different than a type like `Box`? -Let's recall the borrowing rules we learned in Chapter 4: +Let’s recall the borrowing rules we learned in Chapter 4: 1. At any given time, you can have *either* but not both of: * One mutable reference. * Any number of immutable references. 2. References must always be valid. -With references and `Box`, the borrowing rules' invariants are enforced at +With references and `Box`, the borrowing rules’ invariants are enforced at compile time. With `RefCell`, these invariants are enforced *at runtime*. -With references, if you break these rules, you'll get a compiler error. With -`RefCell`, if you break these rules, you'll get a `panic!`. +With references, if you break these rules, you’ll get a compiler error. With +`RefCell`, if you break these rules, you’ll get a `panic!`. Static analysis, like the Rust compiler performs, is inherently conservative. There are properties of code that are impossible to detect by analyzing the code: the most famous is the Halting Problem, which is out of scope of this -book but an interesting topic to research if you're interested. 
+book but an interesting topic to research if you’re interested. Because some analysis is impossible, the Rust compiler does not try to even -guess if it can't be sure, so it's conservative and sometimes rejects correct -programs that would not actually violate Rust's guarantees. Put another way, if +guess if it can’t be sure, so it’s conservative and sometimes rejects correct +programs that would not actually violate Rust’s guarantees. Put another way, if Rust accepts an incorrect program, people would not be able to trust in the guarantees Rust makes. If Rust rejects a correct program, the programmer will be inconvenienced, but nothing catastrophic can occur. `RefCell` is useful -when you know that the borrowing rules are respected, but the compiler can't -understand that that's true. +when you know that the borrowing rules are respected, but the compiler can’t +understand that that’s true. Similarly to `Rc`, `RefCell` is only for use in single-threaded -scenarios. We'll talk about how to get the functionality of `RefCell` in a +scenarios. We’ll talk about how to get the functionality of `RefCell` in a multithreaded program in the next chapter on concurrency. For now, all you need to know is that if you try to use `RefCell` in a multithreaded -context, you'll get a compile time error. +context, you’ll get a compile time error. With references, we use the `&` and `&mut` syntax to create references and mutable references, respectively. But with `RefCell`, we use the `borrow` and `borrow_mut` methods, which are part of the safe API that `RefCell` has. `borrow` returns the smart pointer type `Ref`, and `borrow_mut` returns the smart pointer type `RefMut`. These two types implement `Deref` so that we can -treat them as if they're regular references. `Ref` and `RefMut` track the +treat them as if they’re regular references. `Ref` and `RefMut` track the borrows dynamically, and their implementation of `Drop` releases the borrow dynamically. @@ -843,7 +843,7 @@ a is 5 a is 6 ``` -In `main`, we've created a new `RefCell` containing the value 5, and stored +In `main`, we’ve created a new `RefCell` containing the value 5, and stored in the variable `data`, declared without the `mut` keyword. We then call the `demo` function with an immutable reference to `data`: as far as `main` is concerned, `data` is immutable! @@ -854,14 +854,14 @@ In the `demo` function, we get an immutable reference to the value inside the interestingly, we can get a *mutable* reference to the value inside the `RefCell` with the `borrow_mut` method, and the function `a_fn_that_mutably_borrows` is allowed to change the value. We can see that the -next time we call `a_fn_that_immutably_borrows` that prints out the value, it's +next time we call `a_fn_that_immutably_borrows` that prints out the value, it’s 6 instead of 5. ### Borrowing Rules are Checked at Runtime on `RefCell` Recall from Chapter 4 that because of the borrowing rules, this code using regular references that tries to create two mutable borrows in the same scope -won't compile: +won’t compile: ``` let mut s = String::from("hello"); @@ -870,7 +870,7 @@ let r1 = &mut s; let r2 = &mut s; ``` -We'll get this compiler error: +We’ll get this compiler error: ``` error[E0499]: cannot borrow `s` as mutable more than once at a time @@ -885,7 +885,7 @@ error[E0499]: cannot borrow `s` as mutable more than once at a time ``` In contrast, using `RefCell` and calling `borrow_mut` twice in the same -scope *will* compile, but it'll panic at runtime instead. 
This code: +scope *will* compile, but it’ll panic at runtime instead. This code: ``` use std::cell::RefCell; @@ -908,12 +908,12 @@ thread 'main' panicked at 'already borrowed: BorrowMutError', note: Run with `RUST_BACKTRACE=1` for a backtrace. ``` -This runtime `BorrowMutError` is similar to the compiler error: it says we've -already borrowed `s` mutably once, so we're not allowed to borrow it again. We -aren't getting around the borrowing rules, we're just choosing to have Rust +This runtime `BorrowMutError` is similar to the compiler error: it says we’ve +already borrowed `s` mutably once, so we’re not allowed to borrow it again. We +aren’t getting around the borrowing rules, we’re just choosing to have Rust enforce them at runtime instead of compile time. You could choose to use `RefCell` everywhere all the time, but in addition to having to type -`RefCell` a lot, you'd find out about possible problems later (possibly in +`RefCell` a lot, you’d find out about possible problems later (possibly in production rather than during development). Also, checking the borrowing rules while your program is running has a performance penalty. @@ -922,12 +922,12 @@ while your program is running has a performance penalty. So why would we choose to make the tradeoffs that using `RefCell` involves? Well, remember when we said that `Rc` only lets you have an immutable reference to `T`? Given that `RefCell` is immutable, but has interior -mutability, we can combine `Rc` and `RefCell` to get a type that's both +mutability, we can combine `Rc` and `RefCell` to get a type that’s both reference counted and mutable. Listing 15-15 shows an example of how to do that, again going back to our cons list from Listing 15-5. In this example, -instead of storing `i32` values in the cons list, we'll be storing +instead of storing `i32` values in the cons list, we’ll be storing `Rc>` values. We want to store that type so that we can have an -owner of the value that's not part of the list (the multiple owners +owner of the value that’s not part of the list (the multiple owners functionality that `Rc` provides), and so we can mutate the inner `i32` value (the interior mutability functionality that `RefCell` provides): @@ -963,7 +963,7 @@ fn main() { Listing 15-15: Using `Rc>` to create a `List` that we can mutate -We're creating a value, which is an instance of `Rc>. We're +We’re creating a value, which is an instance of `Rc>`. We’re storing it in a variable named `value` because we want to be able to access it directly later. Then we create a `List` in `a` that has a `Cons` variant that holds `value`, and `value` needs to be cloned since we want `value` to also @@ -988,37 +988,37 @@ This is pretty neat! By using `RefCell`, we can have an outwardly immutable `List`, but we can use the methods on `RefCell` that provide access to its interior mutability to be able to modify our data when we need to. The runtime checks of the borrowing rules that `RefCell` does protect us from data -races, and we've decided that we want to trade a bit of speed for the +races, and we’ve decided that we want to trade a bit of speed for the flexibility in our data structures. `RefCell` is not the only standard library type that provides interior mutability. `Cell` is similar but instead of giving references to the inner value like `RefCell` does, the value is copied in and out of the `Cell`. `Mutex` offers interior mutability that is safe to use across threads, and -we'll be discussing its use in the next chapter on concurrency. 
Check out the +we’ll be discussing its use in the next chapter on concurrency. Check out the standard library docs for more details on the differences between these types. ## Creating Reference Cycles and Leaking Memory is Safe -Rust makes a number of guarantees that we've talked about, for example that -we'll never have a null value, and data races will be disallowed at compile -time. Rust's memory safety guarantees make it more difficult to create memory +Rust makes a number of guarantees that we’ve talked about, for example that +we’ll never have a null value, and data races will be disallowed at compile +time. Rust’s memory safety guarantees make it more difficult to create memory that never gets cleaned up, which is known as a *memory leak*. Rust does not make memory leaks *impossible*, however, preventing memory leaks is *not* one -of Rust's guarantees. In other words, memory leaks are memory safe. +of Rust’s guarantees. In other words, memory leaks are memory safe. By using `Rc` and `RefCell`, it is possible to create cycles of references where items refer to each other in a cycle. This is bad because the reference count of each item in the cycle will never reach 0, and the values -will never be dropped. Let's take a look at how that might happen and how to +will never be dropped. Let’s take a look at how that might happen and how to prevent it. -In Listing 15-16, we're going to use another variation of the `List` definition -from Listing 15-5. We're going back to storing an `i32` value as the first +In Listing 15-16, we’re going to use another variation of the `List` definition +from Listing 15-5. We’re going back to storing an `i32` value as the first element in the `Cons` variant. The second element in the `Cons` variant is now `RefCell>`: instead of being able to modify the `i32` value this time, we want to be able to modify which `List` a `Cons` variant is pointing to. -We've also added a `tail` method to make it convenient for us to access the +We’ve also added a `tail` method to make it convenient for us to access the second item, if we have a `Cons` variant: Filename: src/main.rs @@ -1043,10 +1043,10 @@ impl List { Listing 15-16: A cons list definition that holds a `RefCell` so that we can modify what a `Cons` variant is referring to -Next, in Listing 15-17, we're going to create a `List` value in the variable -`a` that initially is a list of `5, Nil`. Then we'll create a `List` value in +Next, in Listing 15-17, we’re going to create a `List` value in the variable +`a` that initially is a list of `5, Nil`. Then we’ll create a `List` value in the variable `b` that is a list of the value 10 and then points to the list in -`a`. Finally, we'll modify `a` so that it points to `b` instead of `Nil`, which +`a`. Finally, we’ll modify `a` so that it points to `b` instead of `Nil`, which will then create a cycle: Filename: src/main.rs @@ -1088,7 +1088,7 @@ each other We use the `tail` method to get a reference to the `RefCell` in `a`, which we put in the variable `link`. Then we use the `borrow_mut` method on the `RefCell` to change the value inside from an `Rc` that holds a `Nil` value to -the `Rc` in `b`. We've created a reference cycle that looks like Figure 15-18: +the `Rc` in `b`. We’ve created a reference cycle that looks like Figure 15-18: Reference cycle of lists @@ -1098,39 +1098,39 @@ If you uncomment the last `println!`, Rust will try and print this cycle out with `a` pointing to `b` pointing to `a` and so forth until it overflows the stack. 
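Listings 15-16 and 15-17 themselves are largely not reproduced above, so here is a minimal sketch consistent with the description: a `Cons` variant holding a `RefCell<Rc<List>>`, a `tail` helper for reaching that second element, and a `main` that points the tail of `a` at `b` to form the cycle. The exact `println!` wording is an assumption:

```
use std::cell::RefCell;
use std::rc::Rc;
use self::List::{Cons, Nil};

#[derive(Debug)]
enum List {
    Cons(i32, RefCell<Rc<List>>),
    Nil,
}

impl List {
    // Convenience accessor for the second element of a Cons variant.
    fn tail(&self) -> Option<&RefCell<Rc<List>>> {
        match *self {
            Cons(_, ref item) => Some(item),
            Nil => None,
        }
    }
}

fn main() {
    // a is the list 5 -> Nil
    let a = Rc::new(Cons(5, RefCell::new(Rc::new(Nil))));
    println!("a initial rc count = {}", Rc::strong_count(&a));

    // b is the list 10 -> a
    let b = Rc::new(Cons(10, RefCell::new(Rc::clone(&a))));
    println!("a rc count after b creation = {}", Rc::strong_count(&a));

    // Point the tail of a at b instead of Nil, creating the cycle a -> b -> a.
    if let Some(link) = a.tail() {
        *link.borrow_mut() = Rc::clone(&b);
    }

    println!("b rc count after changing a = {}", Rc::strong_count(&b));
    println!("a rc count after changing a = {}", Rc::strong_count(&a));

    // Uncommenting this line overflows the stack while printing the cycle:
    // println!("a next item = {:?}", a.tail());
}
```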
-Looking at the results of the `println!` calls before the last one, we'll see +Looking at the results of the `println!` calls before the last one, we’ll see that the reference count of both `a` and `b` are 2 after we change `a` to point to `b`. At the end of `main`, Rust will try and drop `b` first, which will decrease the count of the `Rc` by one. However, because `a` is still referencing that `Rc`, its count is 1 rather than 0, so the memory the `Rc` has -on the heap won't be dropped. It'll just sit there with a count of one, -forever. In this specific case, the program ends right away, so it's not a +on the heap won’t be dropped. It’ll just sit there with a count of one, +forever. In this specific case, the program ends right away, so it’s not a problem, but in a more complex program that allocates lots of memory in a cycle and holds onto it for a long time, this would be a problem. The program would be using more memory than it needs to be, and might overwhelm the system and cause it to run out of memory available to use. Now, as you can see, creating reference cycles is difficult and inconvenient in -Rust. But it's not impossible: preventing memory leaks in the form of reference +Rust. But it’s not impossible: preventing memory leaks in the form of reference cycles is not one of the guarantees Rust makes. If you have `RefCell` values that contain `Rc` values or similar nested combinations of types with -interior mutability and reference counting, be aware that you'll have to ensure -that you don't create cycles. In the example in Listing 15-14, the solution +interior mutability and reference counting, be aware that you’ll have to ensure +that you don’t create cycles. In the example in Listing 15-14, the solution would probably be to not write code that could create cycles like this, since we do want `Cons` variants to own the list they point to. -With data structures like graphs, it's sometimes necessary to have references +With data structures like graphs, it’s sometimes necessary to have references that create cycles in order to have parent nodes point to their children and children nodes point back in the opposite direction to their parents, for -example. If one of the directions is expressing ownership and the other isn't, +example. If one of the directions is expressing ownership and the other isn’t, one way of being able to model the relationship of the data without creating -reference cycles and memory leaks is using `Weak`. Let's explore that next! +reference cycles and memory leaks is using `Weak`. Let’s explore that next! ### Prevent Reference Cycles: Turn an `Rc` into a `Weak` The Rust standard library provides `Weak`, a smart pointer type for use in situations that have cycles of references but only one direction expresses -ownership. We've been showing how cloning an `Rc` increases the +ownership. We’ve been showing how cloning an `Rc` increases the `strong_count` of references; `Weak` is a way to reference an `Rc` that does not increment the `strong_count`: instead it increments the `weak_count` of references to an `Rc`. When an `Rc` goes out of scope, the inner value will @@ -1140,13 +1140,13 @@ be able to get the value from a `Weak`, we first have to upgrade it to an `Weak` will be `Some` if the `Rc` value has not been dropped yet, and `None` if the `Rc` value has been dropped. Because `upgrade` returns an `Option`, we know Rust will make sure we handle both the `Some` case and the `None` case and -we won't be trying to use an invalid pointer. 
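As a small standalone illustration of that behavior (not one of the book's listings), `Rc::downgrade` produces a `Weak` pointer that does not keep the value alive, and `upgrade` hands back an `Option` so both the `Some` and `None` cases must be handled:

```
use std::rc::Rc;

fn main() {
    let strong = Rc::new(5);

    // Downgrading creates a Weak<i32> and bumps only the weak count.
    let weak = Rc::downgrade(&strong);
    assert_eq!(Rc::strong_count(&strong), 1);
    assert_eq!(Rc::weak_count(&strong), 1);

    // While the Rc is still alive, upgrade returns Some(Rc<i32>).
    assert!(weak.upgrade().is_some());

    drop(strong);

    // Once the last strong reference is gone the value is dropped,
    // and upgrade returns None instead of an invalid pointer.
    assert!(weak.upgrade().is_none());
}
```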
+we won’t be trying to use an invalid pointer. Instead of the list in Listing 15-17 where each item knows only about the -next item, let's say we want a tree where the items know about their children +next item, let’s say we want a tree where the items know about their children items *and* their parent items. -Let's start just with a struct named `Node` that holds its own `i32` value as +Let’s start just with a struct named `Node` that holds its own `i32` value as well as references to its children `Node` values: Filename: src/main.rs @@ -1163,10 +1163,10 @@ struct Node { ``` We want to be able to have a `Node` own its children, and we also want to be -able to have variables own each node so we can access them directly. That's why +able to have variables own each node so we can access them directly. That’s why the items in the `Vec` are `Rc` values. We want to be able to modify what -nodes are another node's children, so that's why we have a `RefCell` in -`children` around the `Vec`. In Listing 15-19, let's create one instance of +nodes are another node’s children, so that’s why we have a `RefCell` in +`children` around the `Vec`. In Listing 15-19, let’s create one instance of `Node` named `leaf` with the value 3 and no children, and another instance named `branch` with the value 5 and `leaf` as one of its children: @@ -1191,18 +1191,18 @@ Listing 15-19: Creating a `leaf` node and a `branch` node where `branch` has The `Node` in `leaf` now has two owners: `leaf` and `branch`, since we clone the `Rc` in `leaf` and store that in `branch`. The `Node` in `branch` knows -it's related to `leaf` since `branch` has a reference to `leaf` in -`branch.children`. However, `leaf` doesn't know that it's related to `branch`, -and we'd like `leaf` to know that `branch` is its parent. +it’s related to `leaf` since `branch` has a reference to `leaf` in +`branch.children`. However, `leaf` doesn’t know that it’s related to `branch`, +and we’d like `leaf` to know that `branch` is its parent. -To do that, we're going to add a `parent` field to our `Node` struct -definition, but what should the type of `parent` be? We know it can't contain +To do that, we’re going to add a `parent` field to our `Node` struct +definition, but what should the type of `parent` be? We know it can’t contain an `Rc`, since `leaf.parent` would point to `branch` and `branch.children` contains a pointer to `leaf`, which makes a reference cycle. Neither `leaf` nor `branch` would get dropped since they would always refer to each other and their reference counts would never be zero. -So instead of `Rc`, we're going to make the type of `parent` use `Weak`, +So instead of `Rc`, we’re going to make the type of `parent` use `Weak`, specifically a `RefCell>`: Filename: src/main.rs @@ -1221,8 +1221,8 @@ struct Node { This way, a node will be able to refer to its parent node if it has one, but it does not own its parent. A parent node will be dropped even if -it has child nodes referring to it, as long as it doesn't have a parent -node as well. Now let's update `main` to look like Listing 15-20: +it has child nodes referring to it, as long as it doesn’t have a parent +node as well. Now let’s update `main` to look like Listing 15-20: Filename: src/main.rs @@ -1253,7 +1253,7 @@ reference to its parent, `branch` Creating the `leaf` node looks similar; since it starts out without a parent, we create a new `Weak` reference instance. 
When we try to get a reference to -the parent of `leaf` by using the `upgrade` method, we'll get a `None` value, +the parent of `leaf` by using the `upgrade` method, we’ll get a `None` value, as shown by the first `println!` that outputs: ``` @@ -1268,8 +1268,8 @@ method on the `RefCell` in the `parent` field of `leaf`, then we use the `Rc::downgrade` function to create a `Weak` reference to `branch` from the `Rc` in `branch.` -When we print out the parent of `leaf` again, this time we'll get a `Some` -variant holding `branch`. Also notice we don't get a cycle printed out that +When we print out the parent of `leaf` again, this time we’ll get a `Some` +variant holding `branch`. Also notice we don’t get a cycle printed out that eventually ends in a stack overflow like we did in Listing 15-14: the `Weak` references are just printed as `(Weak)`: @@ -1279,10 +1279,10 @@ children: RefCell { value: [Node { value: 3, parent: RefCell { value: (Weak) }, children: RefCell { value: [] } }] } }) ``` -The fact that we don't get infinite output (or at least until the stack -overflows) is one way we can see that we don't have a reference cycle in this +The fact that we don’t get infinite output (or at least until the stack +overflows) is one way we can see that we don’t have a reference cycle in this case. Another way we can tell is by looking at the values we get from calling -`Rc::strong_count` and `Rc::weak_count`. In Listing 15-21, let's create a new +`Rc::strong_count` and `Rc::weak_count`. In Listing 15-21, let’s create a new inner scope and move the creation of `branch` in there, so that we can see what happens when `branch` is created and then dropped when it goes out of scope: @@ -1345,10 +1345,10 @@ count of 0. When the inner scope ends, `branch` goes out of scope, and its strong count decreases to 0, so its `Node` gets dropped. The weak count of 1 from -`leaf.parent` has no bearing on whether `Node` gets dropped or not, so we don't +`leaf.parent` has no bearing on whether `Node` gets dropped or not, so we don’t have a memory leak! -If we try to access the parent of `leaf` after the end of the scope, we'll get +If we try to access the parent of `leaf` after the end of the scope, we’ll get `None` again like we did before `leaf` had a parent. At the end of the program, `leaf` has a strong count of 1 and a weak count of 0, since `leaf` is now the only thing pointing to it again. @@ -1356,13 +1356,13 @@ only thing pointing to it again. All of the logic managing the counts and whether a value should be dropped or not was managed by `Rc` and `Weak` and their implementations of the `Drop` trait. By specifying that the relationship from a child to its parent should be -a `Weak` reference in the definition of `Node`, we're able to have parent +a `Weak` reference in the definition of `Node`, we’re able to have parent nodes point to child nodes and vice versa without creating a reference cycle and memory leaks. ## Summary -We've now covered how you can use different kinds of smart pointers to choose +We’ve now covered how you can use different kinds of smart pointers to choose different guarantees and tradeoffs than those Rust makes with regular references. `Box` has a known size and points to data allocated on the heap. `Rc` keeps track of the number of references to data on the heap so that @@ -1370,8 +1370,8 @@ data can have multiple owners. 
`RefCell` with its interior mutability gives us a type that can be used where we need an immutable type, and enforces the borrowing rules at runtime instead of at compile time. -We've also discussed the `Deref` and `Drop` traits that enable a lot of smart -pointers' functionality. We explored how it's possible to create a reference +We’ve also discussed the `Deref` and `Drop` traits that enable a lot of smart +pointers’ functionality. We explored how it’s possible to create a reference cycle that would cause a memory leak, and how to prevent reference cycles by using `Weak`. @@ -1380,5 +1380,5 @@ smart pointers, check out The Nomicon at *https://doc.rust-lang.org/stable/nomicon/vec.html* for even more useful information. -Next, let's talk about concurrency in Rust. We'll even learn about a few new +Next, let’s talk about concurrency in Rust. We’ll even learn about a few new smart pointers that can help us with it. diff --git a/src/doc/book/second-edition/nostarch/chapter16.md b/src/doc/book/second-edition/nostarch/chapter16.md index e79e7f651b..dc2435ed39 100644 --- a/src/doc/book/second-edition/nostarch/chapter16.md +++ b/src/doc/book/second-edition/nostarch/chapter16.md @@ -3,7 +3,7 @@ # Fearless Concurrency -Ensuring memory safety isn't Rust's only goal: being a language that is better +Ensuring memory safety isn’t Rust’s only goal: being a language that is better equipped to handle concurrent and parallel programming has always been another major goal of Rust. *Concurrent programming*, where different parts of a program execute independently, and *parallel programming*, where different @@ -17,34 +17,34 @@ were two separate challenges to be solved with different methods. However, over time, we discovered that ownership and the type system are a powerful set of tools that help in dealing with both memory safety *and* concurrency problems! By leveraging ownership and type checking, many concurrency errors are *compile -time* errors in Rust, rather than runtime errors. We've nicknamed this aspect +time* errors in Rust, rather than runtime errors. We’ve nicknamed this aspect of Rust *fearless concurrency*. Fearless concurrency means Rust not only allows you to have confidence that your code is free of subtle bugs, but also lets you refactor this kind of code easily without worrying about introducing new bugs. -> Note: given that Rust's slogan is *fearless concurrency*, we'll be referring +> Note: given that Rust’s slogan is *fearless concurrency*, we’ll be referring > to many of the problems here as *concurrent* rather than being more precise -> by saying *concurrent and/or parallel*, for simplicity's sake. If this were a -> book specifically about concurrency and/or parallelism, we'd be sure to be +> by saying *concurrent and/or parallel*, for simplicity’s sake. If this were a +> book specifically about concurrency and/or parallelism, we’d be sure to be > more specific. For this chapter, please mentally substitute > *concurrent and/or parallel* whenever we say *concurrent*. Many languages are strongly opinionated about the solutions they offer you to -deal with concurrent problems. That's a very reasonable strategy, especially -for higher-level languages, but lower-level languages don't have that luxury. +deal with concurrent problems. That’s a very reasonable strategy, especially +for higher-level languages, but lower-level languages don’t have that luxury. 
Lower-level languages are expected to enable whichever solution would provide the best performance in a given situation, and they have fewer abstractions over the hardware. Rust, therefore, gives us a variety of tools for modeling our problems in whatever way is appropriate for our situation and requirements. -Here's what we'll cover in this chapter: +Here’s what we’ll cover in this chapter: * How to create threads to run multiple pieces of code at the same time * *Message passing* concurrency, where channels are used to send messages between threads. * *Shared state* concurrency, where multiple threads have access to some piece of data. -* The `Sync` and `Send` traits, which allow Rust's concurrency guarantees to be +* The `Sync` and `Send` traits, which allow Rust’s concurrency guarantees to be extended to user-defined types as well as types provided by the standard library. @@ -64,7 +64,7 @@ this functionality is called *threads*. Splitting up the computation your program needs to do into multiple threads can improve performance, since the program will be doing multiple things at the same time. Programming with threads can add complexity, however. Since threads -run simultaneously, there's no inherent guarantee about the order in which the +run simultaneously, there’s no inherent guarantee about the order in which the parts of your code on different threads will run. This can lead to race conditions where threads are accessing data or resources in an inconsistent order, deadlocks where two threads both prevent each other from continuing, or @@ -85,12 +85,12 @@ threads is sometimes called *1:1*, one OS thread per one language thread. The green threaded model is called the *M:N* model, `M` green threads per `N` OS threads, where `M` and `N` are not necessarily the same number. -Each model has its own advantages and tradeoffs. The tradeoff that's most +Each model has its own advantages and tradeoffs. The tradeoff that’s most important to Rust is runtime support. *Runtime* is a confusing term; it can have different meaning in different contexts. Here, we mean some code included by the language in every binary. For some languages, this code is large, and -for others, this code is small. Colloquially, "no runtime" is often what people -will say when they mean "small runtime", since every non-assembly language has +for others, this code is small. Colloquially, “no runtime” is often what people +will say when they mean “small runtime”, since every non-assembly language has some amount of runtime. Smaller runtimes have fewer features but have the advantage of resulting in smaller binaries. Smaller binaries make it easier to combine the language with other languages in more contexts. While many @@ -105,7 +105,7 @@ language, there are crates that implement M:N threading if you would rather trade overhead for aspects such as more control over which threads run when and lower costs of context switching, for example. -Now that we've defined what threads are in Rust, let's explore how to use the +Now that we’ve defined what threads are in Rust, let’s explore how to use the thread-related API that the standard library provides for us. ### Creating a New Thread with `spawn` @@ -152,11 +152,11 @@ hi number 4 from the spawned thread! hi number 5 from the spawned thread! ``` -The threads will probably take turns, but that's not guaranteed. In this run, +The threads will probably take turns, but that’s not guaranteed. 
In this run, the main thread printed first, even though the print statement from the spawned thread appears first in the code we wrote. And even though we told the spawned thread to print until `i` is 9, it only got to 5 before the main thread shut -down. If you always only see one thread, or if you don't see any overlap, try +down. If you always only see one thread, or if you don’t see any overlap, try increasing the numbers in the ranges to create more opportunities for a thread to take a break and give the other thread a turn. @@ -164,7 +164,7 @@ to take a break and give the other thread a turn. Not only does the code in Listing 16-1 not allow the spawned thread to finish most of the time since the main thread ends before the spawned thread is done, -there's actualy no guarantee that the spawned thread will get to run at all! We +there’s actualy no guarantee that the spawned thread will get to run at all! We can fix this by saving the return value of `thread::spawn`, which is a `JoinHandle`. That looks like Listing 16-2: @@ -194,7 +194,7 @@ thread is run to completion A `JoinHandle` is an owned value that can wait for a thread to finish, which is what the `join` method does. By calling `join` on the handle, the current thread will block until the thread that the handle represents terminates. Since -we've put the call to `join` after the main thread's `for` loop, running this +we’ve put the call to `join` after the main thread’s `for` loop, running this example should produce output that looks something like this: ``` @@ -239,7 +239,7 @@ fn main() { ``` The main thread will wait for the spawned thread to finish before the main -thread starts running its `for` loop, so the output won't be interleaved +thread starts running its `for` loop, so the output won’t be interleaved anymore: ``` @@ -263,21 +263,21 @@ your threads are actually running at the same time or not. ### Using `move` Closures with Threads -There's a feature of closures that we didn't cover in Chapter 13 that's often +There’s a feature of closures that we didn’t cover in Chapter 13 that’s often useful with `thread::spawn`: `move` closures. We said this in Chapter 13: > Creating closures that capture values from their environment is mostly used > in the context of starting new threads. -Now we're creating new threads, so let's talk about capturing values in +Now we’re creating new threads, so let’s talk about capturing values in closures! Notice the closure that we pass to `thread::spawn` in Listing 16-1 takes no -arguments: we're not using any data from the main thread in the spawned -thread's code. In order to use data in the spawned thread that comes from the -main thread, we need the spawned thread's closure to capture the values it +arguments: we’re not using any data from the main thread in the spawned +thread’s code. In order to use data in the spawned thread that comes from the +main thread, we need the spawned thread’s closure to capture the values it needs. Listing 16-3 shows an attempt to create a vector in the main thread and -use it in the spawned thread, which won't work the way this example is written: +use it in the spawned thread, which won’t work the way this example is written: Filename: src/main.rs @@ -299,10 +299,10 @@ Listing 16-3: Attempting to use a vector created by the main thread from another thread The closure uses `v`, so the closure will capture `v` and make `v` part of the -closure's environment. Because `thread::spawn` runs this closure in a new +closure’s environment. 
Because `thread::spawn` runs this closure in a new thread, we can access `v` inside that new thread. -When we compile this example, however, we'll get the following error: +When we compile this example, however, we’ll get the following error: ``` error[E0373]: closure may outlive the current function, but it borrows `v`, @@ -319,14 +319,14 @@ variables), use the `move` keyword, as shown: | let handle = thread::spawn(move || { ``` -When we capture something in a closure's environment, Rust will try to infer +When we capture something in a closure’s environment, Rust will try to infer how to capture it. `println!` only needs a reference to `v`, so the closure -tries to borrow `v`. There's a problem, though: we don't know how long the -spawned thread will run, so we don't know if the reference to `v` will always +tries to borrow `v`. There’s a problem, though: we don’t know how long the +spawned thread will run, so we don’t know if the reference to `v` will always be valid. -Consider the code in Listing 16-4 that shows a scenario where it's more likely -that the reference to `v` won't be valid: +Consider the code in Listing 16-4 that shows a scenario where it’s more likely +that the reference to `v` won’t be valid: Filename: src/main.rs @@ -365,7 +365,7 @@ variables), use the `move` keyword, as shown: ``` By adding the `move` keyword before the closure, we force the closure to take -ownership of the values it's using, rather than inferring borrowing. This +ownership of the values it’s using, rather than inferring borrowing. This modification to the code from Listing 16-3 shown in Listing 16-5 will compile and run as we intend: @@ -389,7 +389,7 @@ Listing 16-5: Using the `move` keyword to force a closure to take ownership of the values it uses What about the code in Listing 16-4 where the main thread called `drop`? If we -add `move` to the closure, we've moved `v` into the closure's environment, and +add `move` to the closure, we’ve moved `v` into the closure’s environment, and we can no longer call `drop` on it. We get this compiler error instead: ``` @@ -406,16 +406,16 @@ error[E0382]: use of moved value: `v` not implement the `Copy` trait ``` -Rust's ownership rules have saved us again! +Rust’s ownership rules have saved us again! -Now that we have a basic understanding of threads and the thread API, let's +Now that we have a basic understanding of threads and the thread API, let’s talk about what we can actually *do* with threads. ## Message Passing to Transfer Data Between Threads -One approach to concurrency that's seen a rise in popularity as of late is +One approach to concurrency that’s seen a rise in popularity as of late is *message passing*, where threads or actors communicate by sending each other -messages containing data. Here's the idea in slogan form: +messages containing data. Here’s the idea in slogan form: > Do not communicate by sharing memory; instead, share memory by > communicating. @@ -427,11 +427,11 @@ halves, a transmitter and a receiver. One part of our code can call methods on the transmitter with the data we want to send, and another part can check the receiving end for arriving messages. -We're going to work up to an example where we have one thread that will +We’re going to work up to an example where we have one thread that will generate values and send them down a channel. The main thread will receive the values and print them out. 
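Listing 16-5 itself is not shown above, so before moving on to channels, here is a minimal sketch of the `move` closure pattern it demonstrates, combined with `join` from earlier in this section. The vector contents and message are illustrative.

```rust
use std::thread;

fn main() {
    let v = vec![1, 2, 3];

    // `move` forces the closure to take ownership of `v`, so the spawned
    // thread does not borrow data that the main thread might drop.
    let handle = thread::spawn(move || {
        println!("Here's a vector: {:?}", v);
    });

    // `join` blocks the current thread until the spawned thread finishes.
    handle.join().unwrap();
}
```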
-First, though, let's start by creating a channel but not doing anything with it +First, though, let’s start by creating a channel but not doing anything with it in Listing 16-6: Filename: src/main.rs @@ -449,17 +449,17 @@ Listing 16-6: Creating a channel and assigning the two halves to `tx` and `rx` The `mpsc::channel` function crates a new channel. `mpsc` stands for *multiple producer, single consumer*. In short, we can have multiple *sending* ends of a channel that produce values, but only one *receiving* end that consumes those -values. We're going to start with a single producer for now, but we'll add +values. We’re going to start with a single producer for now, but we’ll add multiple producers once we get this example working. `mpsc::channel` returns a tuple: the first element is the sending end, and the second element is the receiving end. For historical reasons, many people use `tx` and `rx` to abbreviate *transmitter* and *receiver*, so those are the -names we're using for the variables bound to each end. We're using a `let` -statement with a pattern that destructures the tuples; we'll be discussing the +names we’re using for the variables bound to each end. We’re using a `let` +statement with a pattern that destructures the tuples; we’ll be discussing the use of patterns in `let` statements and destructuring in Chapter 18. -Let's move the transmitting end into a spawned thread and have it send one +Let’s move the transmitting end into a spawned thread and have it send one string, shown in Listing 16-7: Filename: src/main.rs @@ -478,21 +478,21 @@ fn main() { } ``` -Listing 16-7: Moving `tx` to a spawned thread and sending "hi" +Listing 16-7: Moving `tx` to a spawned thread and sending “hi” -We're using `thread::spawn` to create a new thread, just as we did in the +We’re using `thread::spawn` to create a new thread, just as we did in the previous section. We use a `move` closure to make `tx` move into the closure so that the thread owns it. The transmitting end of a channel has the `send` method that takes the value we want to send down the channel. The `send` method returns a `Result` type, -because if the receiving end has already been dropped, there's nowhere to send -a value to, so the send operation would error. In this example, we're simply -calling `unwrap` to ignore this error, but for a real application, we'd want to -handle it properly. Chapter 9 is where you'd go to review strategies for proper +because if the receiving end has already been dropped, there’s nowhere to send +a value to, so the send operation would error. In this example, we’re simply +calling `unwrap` to ignore this error, but for a real application, we’d want to +handle it properly. Chapter 9 is where you’d go to review strategies for proper error handling. -In Listing 16-8, let's get the value from the receiving end of the channel in +In Listing 16-8, let’s get the value from the receiving end of the channel in the main thread: Filename: src/main.rs @@ -514,16 +514,16 @@ fn main() { } ``` -Listing 16-8: Receiving the value "hi" in the main thread and printing it out +Listing 16-8: Receiving the value “hi” in the main thread and printing it out The receiving end of a channel has two useful methods: `recv` and `try_recv`. -Here, we're using `recv`, which is short for *receive*. This method will block +Here, we’re using `recv`, which is short for *receive*. This method will block execution until a value is sent down the channel. Once a value is sent, `recv` will return it in a `Result`. 
When the sending end of the channel closes, `recv` will return an error. The `try_recv` method will not block; it instead returns a `Result` immediately. -If we run the code in Listing 16-8, we'll see the value printed out from the +If we run the code in Listing 16-8, we’ll see the value printed out from the main thread: ``` @@ -532,8 +532,8 @@ Got: hi ### How Channels Interact with Ownership -Let's do an experiment at this point to see how channels and ownership work -together: we'll try to use `val` in the spawned thread after we've sent it down +Let’s do an experiment at this point to see how channels and ownership work +together: we’ll try to use `val` in the spawned thread after we’ve sent it down the channel. Try compiling the code in Listing 16-9: Filename: src/main.rs @@ -558,8 +558,8 @@ fn main() { Listing 16-9: Attempting to use `val` after we have sent it down the channel -Here, we try to print out `val` after we've sent it down the channel via -`tx.send`. This is a bad idea: once we've sent the value to another thread, +Here, we try to print out `val` after we’ve sent it down the channel via +`tx.send`. This is a bad idea: once we’ve sent the value to another thread, that thread could modify it or drop it before we try to use the value again. This could cause errors or unexpected results due to inconsistent or nonexistent data. @@ -581,18 +581,18 @@ error[E0382]: use of moved value: `val` Our concurrency mistake has caused a compile-time error! `send` takes ownership of its parameter and moves the value so that the value is owned by the -receiver. This means we can't accidentally use the value again after sending +receiver. This means we can’t accidentally use the value again after sending it; the ownership system checks that everything is okay. In this regard, message passing is very similar to single ownership in Rust. Message passing enthusiasts enjoy message passing for similar reasons that -Rustaceans enjoy Rust's ownership: single ownership means certain classes of -problems go away. If only one thread at a time can use some memory, there's no +Rustaceans enjoy Rust’s ownership: single ownership means certain classes of +problems go away. If only one thread at a time can use some memory, there’s no chance of a data race. ### Sending Multiple Values and Seeing the Receiver Waiting -The code in Listing 16-8 compiled and ran, but it wasn't very interesting: it's +The code in Listing 16-8 compiled and ran, but it wasn’t very interesting: it’s hard to see that we have two separate threads talking to each other over a channel. Listing 16-10 has some modifications that will prove to us that this code is running concurrently: the spawned thread will now send multiple @@ -635,11 +635,11 @@ send to the main thread. We iterate over them, sending each individually and then pausing by calling the `thread::sleep` function with a `Duration` value of one second. -In the main thread, we're not calling the `recv` function explicitly anymore: -instead we're treating `rx` as an iterator. For each value received, we're +In the main thread, we’re not calling the `recv` function explicitly anymore: +instead we’re treating `rx` as an iterator. For each value received, we’re printing it out. When the channel is closed, iteration will end. 
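Listing 16-10 is not shown above either, so here is a condensed sketch of what the surrounding text describes: the spawned thread sends several values with a one-second pause between them, and the main thread treats `rx` as an iterator. Apart from the strings visible in the output shown next, the exact values are illustrative.

```rust
use std::sync::mpsc;
use std::thread;
use std::time::Duration;

fn main() {
    let (tx, rx) = mpsc::channel();

    thread::spawn(move || {
        let vals = vec![
            String::from("hi"),
            String::from("from"),
            String::from("the"),
            String::from("thread"),
        ];

        // Send each value, pausing one second between sends.
        for val in vals {
            tx.send(val).unwrap();
            thread::sleep(Duration::from_secs(1));
        }
    });

    // `rx` as an iterator: the loop ends when the channel is closed,
    // which happens once the sending end has been dropped.
    for received in rx {
        println!("Got: {}", received);
    }
}
```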
-When running the code in Listing 16-10, we'll see this output, with a one second +When running the code in Listing 16-10, we’ll see this output, with a one second pause in between each line: ``` @@ -649,7 +649,7 @@ Got: the Got: thread ``` -We don't have any pausing or code that would take a while in the `for` loop in +We don’t have any pausing or code that would take a while in the `for` loop in the main thread, so we can tell that the main thread is waiting to receive values from the spawned thread. @@ -701,11 +701,11 @@ Listing 16-11: Sending multiple messages and pausing between each one This time, before we create the first spawned thread, we call `clone` on the sending end of the channel. This will give us a new sending handle that we can -pass to the first spawned thread. We'll pass the original sending end of the +pass to the first spawned thread. We’ll pass the original sending end of the channel to a second spawned thread, and each thread is sending different messages to the receiving end of the channel. -If you run this, you'll *probably* see output like this: +If you run this, you’ll *probably* see output like this: ``` Got: hi @@ -724,17 +724,17 @@ play around with `thread::sleep`, giving it different values in the different threads, you can make the runs more non-deterministic and create different output each time. -Now that we've seen how channels work, let's look at shared-memory concurrency. +Now that we’ve seen how channels work, let’s look at shared-memory concurrency. ## Shared State Concurrency -While message passing is a fine way of dealing with concurrency, it's not the +While message passing is a fine way of dealing with concurrency, it’s not the only one. Consider this slogan again: > Do not communicate by sharing memory; instead, share memory by > communicating. -What would "communicate by sharing memory" look like? And moreover, why would +What would “communicate by sharing memory” look like? And moreover, why would message passing enthusiasts dislike it, and dislike it enough to invert it entirely? @@ -744,19 +744,19 @@ same memory location at the same time. As we saw with multiple ownership made possible by smart pointers in Chapter 15, multiple ownership can add additional complexity, since we need to manage these different owners somehow. -Rust's type system and ownership can help a lot here in getting this management -correct, though. For an example, let's look at one of the more common +Rust’s type system and ownership can help a lot here in getting this management +correct, though. For an example, let’s look at one of the more common concurrency primitives for shared memory: mutexes. ### Mutexes Allow Access to Data from One Thread at a Time -A *mutex* is a concurrency primitive for sharing memory. It's short for "mutual -exclusion", that is, it only allows one thread to access some data at any given -time. Mutexes have a reputation for being hard to use, since there's a lot you +A *mutex* is a concurrency primitive for sharing memory. It’s short for “mutual +exclusion”, that is, it only allows one thread to access some data at any given +time. Mutexes have a reputation for being hard to use, since there’s a lot you have to remember: 1. You have to remember to attempt to acquire the lock before using the data. -2. One you're done with the data that's being guarded by the mutex, you have +2. One you’re done with the data that’s being guarded by the mutex, you have to remember to unlock the data so other threads can acquire the lock. 
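The single-threaded listing that illustrates this discipline is not visible above, so here is a minimal sketch of the two steps in the list just given, assuming the mutex guards a plain `i32`.

```rust
use std::sync::Mutex;

fn main() {
    let m = Mutex::new(5);

    {
        // Step 1: acquire the lock. `lock` blocks until the lock is available
        // and returns a `MutexGuard` wrapped in a `Result`.
        let mut num = m.lock().unwrap();
        *num = 6;
        // Step 2 happens automatically: when the guard goes out of scope at
        // the end of this block, its `Drop` implementation releases the lock.
    }

    println!("m = {:?}", m);
}
```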
For a real-world example of a mutex, imagine a panel discussion at a conference @@ -770,13 +770,13 @@ speak if a panelist forgot to hand the microphone to the next person when they finished using it. If the management of the shared microphone went wrong in any of these ways, the panel would not work as planned! -Management of mutexes can be incredibly tricky to get right, and that's why so -many people are enthusiastic about channels. However, in Rust, we can't get +Management of mutexes can be incredibly tricky to get right, and that’s why so +many people are enthusiastic about channels. However, in Rust, we can’t get locking and unlocking wrong, thanks to the type system and ownership. #### The API of `Mutex` -Let's look at an example of using a mutex in Listing 16-12, without involving +Let’s look at an example of using a mutex in Listing 16-12, without involving multiple threads for the moment: Filename: src/main.rs @@ -801,9 +801,9 @@ simplicity Like many types, we create a `Mutex` through an associated function named `new`. To access the data inside the mutex, we use the `lock` method to acquire -the lock. This call will block until it's our turn to have the lock. This call +the lock. This call will block until it’s our turn to have the lock. This call can fail if another thread was holding the lock and then that thread panicked. -In a similar way as we did in Listing 16-6 in the last section, we're using +In a similar way as we did in Listing 16-6 in the last section, we’re using `unwrap()` for now, rather than better error handling. See Chapter 9 for better tools. @@ -811,13 +811,13 @@ Once we have acquired the lock, we can treat the return value, named `num` in this case, as a mutable reference to the data inside. The type system is how Rust ensures that we acquire a lock before using this value: `Mutex` is not an `i32`, so we *must* acquire the lock in order to be able to use the -`i32` value. We can't forget; the type system won't let us do otherwise. +`i32` value. We can’t forget; the type system won’t let us do otherwise. As you may have suspected, `Mutex` is a smart pointer. Well, more accurately, the call to `lock` returns a smart pointer called `MutexGuard`. This smart pointer implements `Deref` to point at our inner data, similar to the other smart pointers we saw in Chapter 15. In addition, `MutexGuard` has a -`Drop` implementation that releases the lock. This way, we can't forget to +`Drop` implementation that releases the lock. This way, we can’t forget to release the lock. It happens for us automatically when the `MutexGuard` goes out of scope, which it does at the end of the inner scope in Listing 16-12. We can print out the mutex value and see that we were able to change the inner @@ -825,10 +825,10 @@ can print out the mutex value and see that we were able to change the inner #### Sharing a `Mutex` Between Multiple Threads -Let's now try to share a value between multiple threads using `Mutex`. We'll +Let’s now try to share a value between multiple threads using `Mutex`. We’ll spin up ten threads, and have them each increment a counter value by 1 so that the counter goes from 0 to 10. Note that the next few examples will have -compiler errors, and we're going to use those errors to learn more about using +compiler errors, and we’re going to use those errors to learn more about using `Mutex` and how Rust helps us use it correctly. 
Listing 16-13 has our starting example: @@ -862,20 +862,20 @@ fn main() { Listing 16-13: The start of a program having 10 threads each increment a counter guarded by a `Mutex` -We're creating a `counter` variable to hold an `i32` inside a `Mutex`, like -we did in Listing 16-12. Next, we're creating 10 threads by mapping over a +We’re creating a `counter` variable to hold an `i32` inside a `Mutex`, like +we did in Listing 16-12. Next, we’re creating 10 threads by mapping over a range of numbers. We use `thread::spawn` and give all the threads the same -closure: they're each going to acquire a lock on the `Mutex` by calling the +closure: they’re each going to acquire a lock on the `Mutex` by calling the `lock` method and then add 1 to the value in the mutex. When a thread finishes running its closure, `num` will go out of scope and release the lock so that another thread can acquire it. -In the main thread, we're collecting all the join handles like we did in +In the main thread, we’re collecting all the join handles like we did in Listing 16-2, and then calling `join` on each of them to make sure all the threads finish. At that point, the main thread will acquire the lock and print out the result of this program. -We hinted that this example won't compile, let's find out why! +We hinted that this example won’t compile, let’s find out why! ``` error[E0373]: closure may outlive the current function, but it borrows @@ -893,16 +893,16 @@ referenced variables), use the `move` keyword, as shown: ``` This is similar to the problem we solved in Listing 16-5. Given that we spin up -multiple threads, Rust can't know how long the threads will run and whether +multiple threads, Rust can’t know how long the threads will run and whether `counter` will still be valid when each thread tries to borrow it. The help message has a reminder for how to solve this: we can use `move` to give -ownership to each thread. Let's try it by making this change to the closure: +ownership to each thread. Let’s try it by making this change to the closure: ``` thread::spawn(move || { ``` -And trying to compile again. We'll get different errors this time! +And trying to compile again. We’ll get different errors this time! ``` error[E0382]: capture of moved value: `counter` @@ -931,12 +931,12 @@ error[E0382]: use of moved value: `counter` error: aborting due to 2 previous errors ``` -`move` didn't fix this program like it fixed Listing 16-5. Why not? This error -message is a little confusing to read, because it's saying that the `counter` +`move` didn’t fix this program like it fixed Listing 16-5. Why not? This error +message is a little confusing to read, because it’s saying that the `counter` value is moved into the closure, then is captured when we call `lock`. That -sounds like what we wanted, but it's not allowed. +sounds like what we wanted, but it’s not allowed. -Let's reason this out. Instead of making 10 threads in a `for` loop, let's just +Let’s reason this out. Instead of making 10 threads in a `for` loop, let’s just make two threads without a loop and see what happens then. Replace the first `for` loop in Listing 16-13 with this code instead: @@ -956,9 +956,9 @@ let handle2 = thread::spawn(move || { handles.push(handle2); ``` -Here we're making 2 threads, and we changed the variable names used with the -second thread to `handle2` and `num2`. We're simplifying our example for the -moment to see if we can understand the error message we're getting. 
This time, +Here we’re making 2 threads, and we changed the variable names used with the +second thread to `handle2` and `num2`. We’re simplifying our example for the +moment to see if we can understand the error message we’re getting. This time, compiling gives us: ``` @@ -993,18 +993,18 @@ Aha! In the first error message, Rust is showing us that `counter` is moved into the closure for the thread that goes with `handle`. That move is preventing us from capturing `counter` when we try to call `lock` on it and store the result in `num2`, which is in the second thread! So Rust is telling -us that we can't move ownership of `counter` into multiple threads. This was +us that we can’t move ownership of `counter` into multiple threads. This was hard to see before since we were creating multiple threads in a loop, and Rust -can't point to different threads in different iterations of the loop. +can’t point to different threads in different iterations of the loop. #### Multiple Ownership with Multiple Threads In Chapter 15, we were able to have multiple ownership of a value by using the smart pointer `Rc` to create a reference-counted value. We mentioned in -Chapter 15 that `Rc` was only for single-threaded contexts, but let's try -using `Rc` in this case anyway and see what happens. We'll wrap the +Chapter 15 that `Rc` was only for single-threaded contexts, but let’s try +using `Rc` in this case anyway and see what happens. We’ll wrap the `Mutex` in `Rc` in Listing 16-14, and clone the `Rc` before moving -ownership to the thread. We'll switch back to the `for` loop for creating the +ownership to the thread. We’ll switch back to the `for` loop for creating the threads, and keep the `move` keyword with the closure: Filename: src/main.rs @@ -1059,38 +1059,38 @@ std::marker::Send` is not satisfied = note: required by `std::thread::spawn` ``` -Wow, that's quite wordy! Some important parts to pick out: the first note says +Wow, that’s quite wordy! Some important parts to pick out: the first note says `Rc> cannot be sent between threads safely`. The reason for this is in the error message, which, once distilled, says `the trait bound Send is not -satisfied`. We're going to talk about `Send` in the next section; it's one of +satisfied`. We’re going to talk about `Send` in the next section; it’s one of the traits that ensures the types we use with threads are meant for use in concurrent situations. Unfortunately, `Rc` is not safe to share across threads. When `Rc` manages the reference count, it has to add to the count for each call to -`clone` and subtract from the count when each clone is dropped. `Rc` doesn't +`clone` and subtract from the count when each clone is dropped. `Rc` doesn’t use any concurrency primitives to make sure that changes to the count happen in -an operation that couldn't be interrupted by another thread. This could lead to +an operation that couldn’t be interrupted by another thread. This could lead to subtle bugs where the counts are wrong, which could lead to memory leaks or -dropping a value before we're done with it. So what if we had a type that was +dropping a value before we’re done with it. So what if we had a type that was exactly like `Rc`, but made changes to the reference count in a thread-safe way? #### Atomic Reference Counting with `Arc` -If you thought that question sounded like a leading one, you'd be right. There -is a type like `Rc` that's safe to use in concurrent situations: `Arc`. -The 'a' stands for *atomic*, so it's an *atomically reference counted* type. 
-Atomics are an additional kind of concurrency primitive that we won't cover +If you thought that question sounded like a leading one, you’d be right. There +is a type like `Rc` that’s safe to use in concurrent situations: `Arc`. +The ‘a’ stands for *atomic*, so it’s an *atomically reference counted* type. +Atomics are an additional kind of concurrency primitive that we won’t cover here; see the standard library documentation for `std::sync::atomic` for more details. The gist of it is this: atomics work like primitive types, but are safe to share across threads. -Why aren't all primitive types atomic, and why aren't all standard library +Why aren’t all primitive types atomic, and why aren’t all standard library types implemented to use `Arc` by default? Thread safety comes with a -performance penalty that we only want to pay when we need it. If we're only +performance penalty that we only want to pay when we need it. If we’re only doing operations on values within a single thread, our code can run faster -since it doesn't need the guarantees that atomics give us. +since it doesn’t need the guarantees that atomics give us. Back to our example: `Arc` and `Rc` are identical except for the atomic internals of `Arc`. Their API is the same, so we can change the `use` line @@ -1133,7 +1133,7 @@ Result: 10 We did it! We counted from 0 to 10, which may not seem very impressive, but we learned a lot about `Mutex` and thread safety along the way! The structure -that we've built in this example could be used to do more complicated +that we’ve built in this example could be used to do more complicated operations than just incrementing a counter. Calculations that can be divided up into independent parts could be split across threads in this way, and we can use a `Mutex` to allow each thread to update the final result with its part. @@ -1147,35 +1147,35 @@ use `Mutex` to be able to mutate contents inside of an `Arc`. Recall that `Rc` did not prevent every possible problem: we also talked about the possibility of creating reference cycles where two `Rc` values refer to each other, which would cause a memory leak. We have a similar problem -with `Mutex` that Rust also doesn't prevent: deadlocks. A *deadlock* is a +with `Mutex` that Rust also doesn’t prevent: deadlocks. A *deadlock* is a situation in which an operation needs to lock two resources, and two threads have each acquired one of the locks and will now wait for each other forever. -If you're interested in this topic, try creating a Rust program that has a +If you’re interested in this topic, try creating a Rust program that has a deadlock, then research deadlock mitigation strategies that apply to the use of mutexes in any language and try implementing them in Rust. The standard library API documentation for `Mutex` and `MutexGuard` will have useful information. -Rust's type system and ownership has made sure that our threads have exclusive -access to the shared value when they're updating it, so the threads won't -overwrite each other's answers in unpredictable ways. It took us a while to -work with the compiler to get everything right, but we've saved future time +Rust’s type system and ownership has made sure that our threads have exclusive +access to the shared value when they’re updating it, so the threads won’t +overwrite each other’s answers in unpredictable ways. 
It took us a while to +work with the compiler to get everything right, but we’ve saved future time that might be spent trying to reproduce subtly incorrect scenarios that only happen when the threads run in a particular order. -Let's round out this chapter by talking about the `Send` and `Sync` traits and +Let’s round out this chapter by talking about the `Send` and `Sync` traits and how we could use them with custom types. ## Extensible Concurrency with the `Sync` and `Send` Traits -One interesting aspect of Rust's concurrency model is that the language knows -*very* little about concurrency. Almost everything we've been talking about so +One interesting aspect of Rust’s concurrency model is that the language knows +*very* little about concurrency. Almost everything we’ve been talking about so far has been part of the standard library, not the language itself. Because we -don't need the language to provide everything we need to program in a -concurrent context, we're not limited to the concurrency options that the +don’t need the language to provide everything we need to program in a +concurrent context, we’re not limited to the concurrency options that the standard library or language provide: we can write our own or use ones others have written. -We said *almost* everything wasn't in the language, so what is? There are two +We said *almost* everything wasn’t in the language, so what is? There are two traits, both in `std::marker`: `Sync` and `Send`. ### `Send` for Indicating Ownership May Be Transferred to Another Thread @@ -1186,10 +1186,10 @@ some exceptions. One type provided by the standard library that is not `Send` is `Rc`: if we clone an `Rc` value and try to transfer ownership of the clone to another thread, both threads might update the reference count at the same time. As we mentioned in the previous section, `Rc` is implemented for -use in single-threaded situations where you don't want to pay the performance +use in single-threaded situations where you don’t want to pay the performance penalty of having a threadsafe reference count. -Because `Rc` is not marked `Send`, Rust's type system and trait bounds +Because `Rc` is not marked `Send`, Rust’s type system and trait bounds ensure that we can never forget and accidentally send an `Rc` value across threads unsafely. We tried to do this in Listing 16-14, and we got an error that said `the trait Send is not implemented for Rc>`. When we @@ -1197,7 +1197,7 @@ switched to `Arc`, which is `Send`, the code compiled. Any type that is composed entirely of `Send` types is automatically marked as `Send` as well. Almost all primitive types are `Send`, aside from raw pointers, -which we'll discuss in Chapter 19. Most standard library types are `Send`, +which we’ll discuss in Chapter 19. Most standard library types are `Send`, aside from `Rc`. ### `Sync` for Indicating Access from Multiple Threads is Safe @@ -1208,7 +1208,7 @@ is `Sync` if `&T` (a reference to `T`) is `Send` so that the reference can be sent safely to another thread. In a similar manner as `Send`, primitive types are `Sync` and types composed entirely of types that are `Sync` are also `Sync`. -`Rc` is also not `Sync`, for the same reasons that it's not `Send`. +`Rc` is also not `Sync`, for the same reasons that it’s not `Send`. `RefCell` (which we talked about in Chapter 15) and the family of related `Cell` types are not `Sync`. The implementation of the borrow checking at runtime that `RefCell` does is not threadsafe. 
`Mutex` is `Sync`, and can @@ -1216,21 +1216,21 @@ be used to share access with multiple threads as we saw in the previous section. ### Implementing `Send` and `Sync` Manually is Unsafe -Usually, we don't need to implement the `Send` and `Sync` traits, since types +Usually, we don’t need to implement the `Send` and `Sync` traits, since types that are made up of `Send` and `Sync` traits are automatically also `Send` and -`Sync`. Because they're marker traits, they don't even have any methods to -implement. They're just useful for enforcing concurrency-related invariants. +`Sync`. Because they’re marker traits, they don’t even have any methods to +implement. They’re just useful for enforcing concurrency-related invariants. Implementing the guarantees that these traits are markers for involves -implementing unsafe Rust code. We're going to be talking about using unsafe +implementing unsafe Rust code. We’re going to be talking about using unsafe Rust code in Chapter 19; for now, the important information is that building -new concurrent types that aren't made up of `Send` and `Sync` parts requires +new concurrent types that aren’t made up of `Send` and `Sync` parts requires careful thought to make sure the safety guarantees are upheld. The Nomicon at *https://doc.rust-lang.org/stable/nomicon/vec.html* has more information about these guarantees and how to uphold them. ## Summary -This isn't the last we'll see of concurrency in this book; the project in +This isn’t the last we’ll see of concurrency in this book; the project in Chapter 20 will use these concepts in a more realistic situation than the smaller examples we discussed in this chapter. @@ -1242,12 +1242,12 @@ the current state-of-the-art crates for use in multithreaded situations. Rust provides channels for message passing and smart pointer types like `Mutex` and `Arc` that are safe to use in concurrent contexts. The type system and the borrow checker will make sure the code we write using these -solutions won't have data races or invalid references. Once we get our code +solutions won’t have data races or invalid references. Once we get our code compiling, we can rest assured that our code will happily run on multiple threads without the kinds of hard-to-track-down bugs common in other programming languages. Concurrent programming is no longer something to be afraid of: go forth and make your programs concurrent, fearlessly! -Next, let's talk about idiomatic ways to model problems and structure solutions -as your Rust programs get bigger, and how Rust's idioms relate to those you +Next, let’s talk about idiomatic ways to model problems and structure solutions +as your Rust programs get bigger, and how Rust’s idioms relate to those you might be familiar with from Object Oriented Programming. diff --git a/src/doc/book/second-edition/nostarch/chapter17.md b/src/doc/book/second-edition/nostarch/chapter17.md index 36d683bb4a..3e8ab13f47 100644 --- a/src/doc/book/second-edition/nostarch/chapter17.md +++ b/src/doc/book/second-edition/nostarch/chapter17.md @@ -6,24 +6,24 @@ Object-Oriented Programming is a way of modeling programs that originated with Simula in the 1960s and became popular with C++ in the 1990s. There are many competing definitions for what OOP is: under some definitions, Rust is -object-oriented; under other definitions, Rust is not. In this chapter, we'll +object-oriented; under other definitions, Rust is not. 
In this chapter, we’ll explore some characteristics that are commonly considered to be object-oriented and how those characteristics translate to idiomatic Rust. ## What Does Object-Oriented Mean? -There isn't consensus in the programming community about the features a +There isn’t consensus in the programming community about the features a language needs to have in order to be called object-oriented. Rust is influenced by many different programming paradigms; we explored the features it has that come from functional programming in Chapter 13. Some of the characteristics that object-oriented programming languages tend to share are -objects, encapsulation, and inheritance. Let's take a look at what each of +objects, encapsulation, and inheritance. Let’s take a look at what each of those mean and whether Rust supports them. ### Objects Contain Data and Behavior -The book "Design Patterns: Elements of Reusable Object-Oriented Software," -colloquially referred to as "The Gang of Four book," is a catalog of +The book “Design Patterns: Elements of Reusable Object-Oriented Software,” +colloquially referred to as “The Gang of Four book,” is a catalog of object-oriented design patterns. It defines object-oriented programming in this way: @@ -33,17 +33,17 @@ way: Under this definition, then, Rust is object-oriented: structs and enums have data and `impl` blocks provide methods on structs and enums. Even though -structs and enums with methods aren't *called* objects, they provide the same -functionality that objects do, using the Gang of Four's definition of objects. +structs and enums with methods aren’t *called* objects, they provide the same +functionality that objects do, using the Gang of Four’s definition of objects. ### Encapsulation that Hides Implementation Details Another aspect commonly associated with object-oriented programming is the idea -of *encapsulation*: the implementation details of an object aren't accessible +of *encapsulation*: the implementation details of an object aren’t accessible to code using that object. The only way to interact with an object is through the public API the object offers; code using the object should not be able to -reach into the object's internals and change data or behavior directly. -Encapsulation enables changing and refactoring an object's internals without +reach into the object’s internals and change data or behavior directly. +Encapsulation enables changing and refactoring an object’s internals without needing to change the code that uses the object. As we discussed in Chapter 7, we can use the `pub` keyword to decide what @@ -52,7 +52,7 @@ default, everything is private. For example, we can define a struct `AveragedCollection` that has a field containing a vector of `i32` values. The struct can also have a field that knows the average of the values in the vector so that whenever anyone wants to know the average of the values that the struct -has in its vector, we don't have to compute it on-demand. `AveragedCollection` +has in its vector, we don’t have to compute it on-demand. `AveragedCollection` will cache the calculated average for us. Listing 17-1 has the definition of the `AveragedCollection` struct: @@ -113,16 +113,16 @@ instance of a `AveragedCollection`. When an item is added to `list` using the `add` method or removed using the `remove` method, the implementations of those methods call the private `update_average` method that takes care of updating the `average` field as well. 
Because the `list` and `average` fields are -private, there's no way for external code to add or remove items to the `list` +private, there’s no way for external code to add or remove items to the `list` field directly, which could cause the `average` field to get out of sync. The `average` method returns the value in the `average` field, which allows external code to read the `average` but not modify it. -Because we've encapsulated the implementation details of `AveragedCollection`, +Because we’ve encapsulated the implementation details of `AveragedCollection`, we could also change aspects like using a different data structure used for the `list` to use a `HashSet` instead of a `Vec`, for instance. As long as the signatures of the `add`, `remove`, and `average` public methods stayed the same, -code using `AveragedCollection` wouldn't need to change. This wouldn't +code using `AveragedCollection` wouldn’t need to change. This wouldn’t necessarily be the case if we exposed `list` to external code: `HashSet` and `Vec` have different methods for adding and removing items, so the external code would likely have to change if it was modifying `list` directly. @@ -134,15 +134,15 @@ different parts of code enables encapsulation of implementation details. ### Inheritance as a Type System and as Code Sharing *Inheritance* is a mechanism that some programming languages provide whereby an -object can be defined to inherit from another object's definition, thus gaining -the parent object's data and behavior without having to define those again. -Inheritance is a characteristic that is part of some people's definitions of +object can be defined to inherit from another object’s definition, thus gaining +the parent object’s data and behavior without having to define those again. +Inheritance is a characteristic that is part of some people’s definitions of what an OOP language is. If a language must have inheritance to be an object-oriented language, then Rust is not object-oriented. There is not a way to define a struct that -inherits from another struct in order to gain the parent struct's fields and -method implementations. However, if you're used to having inheritance in your +inherits from another struct in order to gain the parent struct’s fields and +method implementations. However, if you’re used to having inheritance in your programming toolbox, there are other solutions in Rust depending on the reason you want to use inheritance. @@ -167,12 +167,12 @@ substituted for each other at runtime if they have the same shape. -> While many people use "polymorphism" to describe inheritance, it's actually -> a specific kind of polymorphism, called "sub-type polymorphism." There are +> While many people use “polymorphism” to describe inheritance, it’s actually +> a specific kind of polymorphism, called “sub-type polymorphism.” There are > other forms as well; a generic parameter with a trait bound in Rust is -> also polymorphism, more specifically "parametric polymorphism." The exact -> details between the different kinds of polymorphism aren't crucial here, -> so don't worry too much about the details: just know that Rust has multiple +> also polymorphism, more specifically “parametric polymorphism.” The exact +> details between the different kinds of polymorphism aren’t crucial here, +> so don’t worry too much about the details: just know that Rust has multiple > polymorphism-related features, unlike many OOP languages. @@ -183,17 +183,17 @@ a particular trait. 
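Returning to the encapsulation example above: Listings 17-1 and 17-2 appear only as unchanged context, so here is a minimal sketch of the shape the surrounding text describes, with private `list` and `average` fields, public `add`, `remove`, and `average` methods, and a private `update_average`. The `f64` average type and the pop-the-last-value behavior of `remove` are assumptions beyond what the text states.

```rust
pub struct AveragedCollection {
    // Private fields: callers must go through the methods below, so the
    // cached average can never get out of sync with the list.
    list: Vec<i32>,
    average: f64,
}

impl AveragedCollection {
    pub fn add(&mut self, value: i32) {
        self.list.push(value);
        self.update_average();
    }

    pub fn remove(&mut self) -> Option<i32> {
        let result = self.list.pop();
        if result.is_some() {
            self.update_average();
        }
        result
    }

    pub fn average(&self) -> f64 {
        self.average
    }

    fn update_average(&mut self) {
        let total: i32 = self.list.iter().sum();
        self.average = total as f64 / self.list.len() as f64;
    }
}
```

As the text notes, swapping the `Vec<i32>` for a `HashSet<i32>` would only require touching these method bodies, not the code that calls the public methods.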
Inheritance has recently fallen out of favor as a programming design solution in many programming languages. Using inheritance to re-use some code can -require more code to be shared than you actually need. Subclasses shouldn't +require more code to be shared than you actually need. Subclasses shouldn’t always share all characteristics of their parent class, but inheritance means -the subclass gets all of its parent's data and behavior. This can make a -program's design less flexible, and creates the possibility of calling methods -on subclasses that don't make sense or cause errors since the methods don't +the subclass gets all of its parent’s data and behavior. This can make a +program’s design less flexible, and creates the possibility of calling methods +on subclasses that don’t make sense or cause errors since the methods don’t apply to the subclass but must be inherited from the parent class. In addition, some languages only allow a subclass to inherit from one class, further -restricting the flexibility of a program's design. +restricting the flexibility of a program’s design. For these reasons, Rust chose to take a different approach with trait objects -instead of inheritance. Let's take a look at how trait objects enable +instead of inheritance. Let’s take a look at how trait objects enable polymorphism in Rust. ## Trait Objects for Using Values of Different Types @@ -214,19 +214,19 @@ Sometimes we want the set of types that we use to be extensible by the programmers who use our library. For example, many Graphical User Interface tools have a concept of a list of items that get drawn on the screen by iterating through the list and calling a `draw` method on each of the items. -We're going to create a library crate containing the structure of a GUI library +We’re going to create a library crate containing the structure of a GUI library called `rust_gui`. Our GUI library could include some types for people to use, such as `Button` or `TextField`. Programmers that use `rust_gui` will want to create more types that can be drawn on the screen: one programmer might add an -`Image`, while another might add a `SelectBox`. We're not going to implement a +`Image`, while another might add a `SelectBox`. We’re not going to implement a fully-fledged GUI library in this chapter, but we will show how the pieces would fit together. -When we're writing the `rust_gui` library, we don't know all the types that -other programmers will want to create, so we can't define an `enum` containing +When we’re writing the `rust_gui` library, we don’t know all the types that +other programmers will want to create, so we can’t define an `enum` containing all the types. What we do know is that `rust_gui` needs to be able to keep track of a bunch of values of all these different types, and it needs to be -able to call a `draw` method on each of these values. Our GUI library doesn't +able to call a `draw` method on each of these values. Our GUI library doesn’t need to know what will happen exactly when we call the `draw` method, just that the value will have that method available for us to call. @@ -239,26 +239,26 @@ instances and call `draw` on them. ### Defining a Trait for the Common Behavior -In Rust, though, we can define a trait that we'll name `Draw` and that will +In Rust, though, we can define a trait that we’ll name `Draw` and that will have one method named `draw`. 
Then we can define a vector that takes a *trait object*, which is a trait behind some sort of pointer, such as a `&` reference or a `Box` smart pointer. -We mentioned that we don't call structs and enums "objects" to distinguish -structs and enums from other languages' objects. The data in the struct or enum +We mentioned that we don’t call structs and enums “objects” to distinguish +structs and enums from other languages’ objects. The data in the struct or enum fields and the behavior in `impl` blocks is separated, as opposed to other languages that have data and behavior combined into one concept called an object. Trait objects *are* more like objects in other languages, in the sense that they combine the data made up of the pointer to a concrete object with the behavior of the methods defined in the trait. However, trait objects are -different from objects in other languages because we can't add data to a trait -object. Trait objects aren't as generally useful as objects in other languages: +different from objects in other languages because we can’t add data to a trait +object. Trait objects aren’t as generally useful as objects in other languages: their purpose is to allow abstraction across common behavior. A trait defines behavior that we need in a given situation. We can then use a trait as a trait object in places where we would use a concrete type or a -generic type. Rust's type system will ensure that any value we substitute in -for the trait object will implement the methods of the trait. Then we don't +generic type. Rust’s type system will ensure that any value we substitute in +for the trait object will implement the methods of the trait. Then we don’t need to know all the possible types at compile time, and we can treat all the instances the same way. Listing 17-3 shows how to define a trait named `Draw` with one method named `draw`: @@ -276,7 +276,7 @@ Listing 17-3: Definition of the `Draw` trait This should look familiar since we talked about how to define traits in Chapter 10. Next comes something new: Listing 17-4 has the definition of a struct named `Screen` that holds a vector named `components` that are of type -`Box`. That `Box` is a trait object: it's a stand-in for any type +`Box`. That `Box` is a trait object: it’s a stand-in for any type inside a `Box` that implements the `Draw` trait. Filename: src/lib.rs @@ -290,7 +290,7 @@ pub struct Screen { Listing 17-4: Definition of the `Screen` struct with a `components` field that holds a vector of trait objects that implement the `Draw` trait -On the `Screen` struct, we'll define a method named `run`, which will call the +On the `Screen` struct, we’ll define a method named `run`, which will call the `draw` method on each of its `components` as shown in Listing 17-5: Filename: src/lib.rs @@ -335,20 +335,20 @@ Listing 17-6: An alternate implementation of the `Screen` struct and its `run` method using generics and trait bounds This only lets us have a `Screen` instance that has a list of components that -are all of type `Button` or all of type `TextField`. If you'll only ever have +are all of type `Button` or all of type `TextField`. If you’ll only ever have homogeneous collections, using generics and trait bounds is preferable since the definitions will be monomorphized at compile time to use the concrete types. With the definition of `Screen` that holds a component list of trait objects in `Vec>` instead, one `Screen` instance can hold a `Vec` that contains -a `Box